From 7b558540d1efb616406fa2d1aa416c4ee9616def Mon Sep 17 00:00:00 2001
From: hyb <kk_huangyangbo@163.com>
Date: Wed, 07 Jan 2026 09:26:27 +0000
Subject: [PATCH] 更新华东师范大学二期的并发入驻笼位,新增直接通过数据库获取使用人ID和对应课题组信息
---
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE | 201
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/WHEEL | 4
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache/_helpers_py.py | 62
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/winterm.py | 195
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/_collections.py | 487
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal/py.typed | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/wait.py | 124
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/__init__.py | 7
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_notebook.py | 9
测试组/脚本/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/METADATA | 72
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/url.py | 469
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/py.typed | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/METADATA | 112
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/compat.py | 106
测试组/脚本/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/WHEEL | 4
测试组/脚本/Change_password/venv_build/Lib/site-packages/typing_extensions.py | 4317 ++
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict/_abc.py | 73
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/bells.py | 26
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/REQUESTED | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/intranges.py | 57
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/util.py | 42
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/codec.py | 122
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/package_data.py | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/cd.py | 395
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/tests/utils.py | 49
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/sessions.py | 831
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/py.typed | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.pyx | 162
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/RECORD | 139
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/gui.py | 179
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/payload_streamer.py | 78
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/utils.py | 1086
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/COMMAND.py | 32
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/converters.py | 363
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_fileresponse.py | 418
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache/py.typed | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/tests/initialise_test.py | 189
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/METADATA | 133
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/py.typed | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/compat.py | 15
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/utils.py | 399
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi/py.typed | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.pyx | 148
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/__init__.py | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict/__init__.py | 60
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/__init__.py | 72
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/autonotebook.py | 29
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/http_parser.py | 1086
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/payload.py | 1120
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/__init__.py | 16
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_middlewares.py | 121
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/helpers.py | 986
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/LICENSE | 202
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA | 123
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/typedefs.py | 69
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/client_reqrep.py | 1536
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/streams.py | 758
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/client_middlewares.py | 55
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/METADATA | 441
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/impl.py | 259
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_monitor.py | 95
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/LICENSE | 202
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/tests/ansi_test.py | 76
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/core.py | 437
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/concurrent.py | 105
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/discord.py | 156
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/py.typed | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_make.py | 3362 +
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/cli.py | 324
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE | 20
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/py.typed | 2
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.py | 478
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache/api.py | 8
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/tests/isatty_test.py | 57
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_log.py | 216
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/FIELD_TYPE.py | 31
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/certs.py | 17
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/py.typed | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/notebook.py | 317
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/METADATA | 672
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/WHEEL | 4
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/setters.py | 79
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_quoting_py.py | 213
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE | 201
测试组/脚本/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/RECORD | 7
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/http_websocket.py | 36
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/_staggered.py | 207
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/http_writer.py | 378
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_quoting.py | 19
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_routedef.py | 214
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE | 21
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/filters.py | 3
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/__init__.py | 45
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/http2/__init__.py | 53
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_request.py | 914
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_url.py | 1622
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_cparser.pxd | 158
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/__init__.py | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/completion.sh | 19
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/request.py | 263
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/api.py | 669
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/CR.py | 79
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/tests/winterm_test.py | 131
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/hdrs.py.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/converters.pyi | 19
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_funcs.py | 497
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/response.py | 101
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/api.py | 157
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/__init__.py | 184
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/formdata.py | 179
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_exceptions.py | 452
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_typing_compat.pyi | 15
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/constant.py | 2015 +
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/http2/probe.py | 87
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/models.py | 360
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/RECORD | 31
测试组/脚本/造数脚本2/华东师范大学二期/并发入驻笼位.py | 110
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/METADATA | 443
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/version.py | 9
测试组/脚本/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/request.py | 22
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/connection.py | 137
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/err.py | 150
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/__init__.py | 48
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/connection.py | 259
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/__init__.pyi | 314
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/converters.py | 3
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/entry_points.txt | 2
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_py.py | 478
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/__init__.py | 104
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_cookie_helpers.py | 338
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/licenses/LICENSE | 175
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_server.py | 84
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/RECORD | 55
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/cookies.py | 561
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_find_header.pxd | 2
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/filters.py | 72
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict/_compat.py | 15
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/connector.py | 1842 +
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD | 16
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/proxy.py | 43
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_app.py | 620
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/exceptions.py | 151
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/METADATA | 764
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_headers.pxi | 83
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/licenses/LICENSE.txt | 21
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.py | 86
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/idnadata.py | 4309 ++
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/licenses/LICENSE.md | 31
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/__init__.py | 42
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/METADATA | 262
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_main.py | 9
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist/py.typed | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/py.typed | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/adapters.py | 696
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/exceptions.py | 3
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/validators.py | 748
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/METADATA | 235
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE | 279
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/help.py | 134
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/RECORD | 18
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm.py | 9
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/timeout.py | 275
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/charset.py | 217
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE | 22
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/telegram.py | 153
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/__init__.py | 38
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_version_info.pyi | 9
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/filepost.py | 89
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/worker.py | 255
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/tqdm.1 | 314
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/optionfile.py | 21
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_ws.py | 631
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/pytest_plugin.py | 444
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_match_hostname.py | 159
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/__main__.py | 6
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/tk.py | 196
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/__version__.py | 14
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/dask.py | 44
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_cmp.py | 160
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/REQUESTED | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/METADATA | 156
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi/__init__.py | 4
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_.py | 527
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/tcp_helpers.py | 37
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/RECORD | 22
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict/_multidict_py.py | 1242
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__main__.py | 381
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/RECORD | 9
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/exceptions.pyi | 17
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/LICENSE.txt | 13
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/ansi.py | 102
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/METADATA | 2478 +
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/exceptions.py | 335
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/ansitowin32.py | 277
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__init__.py | 8
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/client_proto.py | 359
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/RECORD | 79
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/http_exceptions.py | 116
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/rich.py | 151
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/_internal_utils.py | 50
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/logging.py | 126
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/writer.py | 262
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi/cacert.pem | 4468 ++
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_gui.py | 9
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/METADATA | 131
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/cursors.py | 531
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_response.py | 856
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_config.py | 31
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pyx | 48
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/retry.py | 533
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/_base_connection.py | 165
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/__main__.py | 3
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi/__main__.py | 12
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_cmp.pyi | 13
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna/uts46data.py | 8841 ++++
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/hooks.py | 33
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/itertools.py | 35
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/status_codes.py | 128
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/licenses/LICENSE | 19
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/keras.py | 122
测试组/脚本/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE | 279
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/py.typed | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/socks.py | 228
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_protocol.py | 792
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pxd | 3
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/SERVER_STATUS.py | 10
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/packages.py | 23
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/validators.pyi | 140
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/http.py | 72
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/RECORD | 14
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/FLAG.py | 15
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_next_gen.py | 674
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/initialise.py | 121
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/std.py | 1524
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/_auth.py | 272
测试组/脚本/Change_password/venv_build/Scripts/tqdm.exe | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/__init__.py | 278
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_path.py | 41
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/licenses/LICENSE | 21
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/NOTICE | 13
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/ER.py | 477
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/emscripten_fetch_worker.js | 110
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/RECORD | 35
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/response.py | 277
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.py | 635
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_query.py | 121
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web.py | 592
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/__init__.py | 14
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/__init__.py | 14
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_utils.py | 11
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_compat.py | 99
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/RECORD | 43
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/WHEEL | 4
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader.py | 31
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict/_multidict.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi/core.py | 83
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_urldispatcher.py | 1305
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_parse.py | 203
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/setters.py | 3
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/CLIENT.py | 38
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/compression_utils.py | 348
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/REQUESTED | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/web_runner.py | 399
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_dist_ver.py | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/fetch.py | 726
测试组/脚本/Change_password/venv_build/Scripts/normalizer.exe | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/__init__.pyi | 389
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/connections.py | 1435
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/_version_info.py | 89
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/RECORD | 75
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/log.py | 8
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/NOTICE | 13
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/times.py | 20
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.pyi | 47
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/__init__.py | 92
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/client_exceptions.py | 421
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/_version.py | 34
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/licenses/LICENSE | 13
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/models.py | 84
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/util/ssltransport.py | 271
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/RECORD | 43
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal/__init__.py | 59
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/exceptions.py | 95
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/version.py | 8
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.pyx | 451
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/helpers.py | 147
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/top_level.txt | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.pyx | 835
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/tracing.py | 455
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt | 27
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/LICENCE | 49
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/abc.py | 268
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/__init__.py | 211
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/utils.py | 97
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache/_helpers.py | 39
测试组/脚本/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/RECORD | 12
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/fields.py | 341
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/resolver.py | 274
测试组/脚本/Change_password/venv_build/Lib/site-packages/attrs/validators.py | 3
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/constants/__init__.py | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/RECORD | 16
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/METADATA | 78
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache/__init__.py | 32
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/models.py | 1039
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/slack.py | 120
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.pxd | 110
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/client.py | 1635
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/utils.py | 414
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/_request_methods.py | 278
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/contrib/utils_worker.py | 38
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/client_ws.py | 428
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/tests/__init__.py | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_pandas.py | 24
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/md__mypyc.cp312-win_amd64.pyd | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/METADATA | 1594
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/contrib/pyopenssl.py | 564
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/auth.py | 314
测试组/脚本/Change_password/venv_build/Lib/site-packages/requests/structures.py | 99
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/INSTALLER | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/entry_points.txt | 2
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/tests/ansitowin32_test.py | 294
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/base_protocol.py | 100
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/connection.py | 1099
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl/_quoters.py | 33
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/connectionpool.py | 1178
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/http2/connection.py | 356
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.pyx | 103
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/METADATA | 149
测试组/脚本/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/METADATA | 209
测试组/脚本/Change_password/venv_build/Lib/site-packages/charset_normalizer/legacy.py | 80
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/asyncio.py | 93
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/multipart.py | 1152
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/poolmanager.py | 651
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/converters.py | 162
测试组/脚本/Change_password/venv_build/Lib/site-packages/urllib3/response.py | 1476
测试组/脚本/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/types.py | 17
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/cookiejar.py | 522
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL | 4
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/test_utils.py | 774
测试组/脚本/Change_password/venv_build/Lib/site-packages/colorama/win32.py | 180
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/client_middleware_digest_auth.py | 480
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/setters.pyi | 20
测试组/脚本/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/WHEEL | 5
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/REQUESTED | 0
测试组/脚本/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/RECORD | 26
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/__init__.py | 183
测试组/脚本/Change_password/venv_build/Lib/site-packages/aiohttp/hdrs.py | 121
测试组/脚本/Change_password/venv_build/Lib/site-packages/multidict/py.typed | 1
测试组/脚本/Change_password/venv_build/Lib/site-packages/pymysql/protocol.py | 356
测试组/脚本/Change_password/venv_build/Lib/site-packages/tqdm/auto.py | 40
测试组/脚本/Change_password/venv_build/Lib/site-packages/attr/filters.pyi | 6
403 files changed, 106,844 insertions(+), 6 deletions(-)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE"
new file mode 100644
index 0000000..f26bcf4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE"
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation. In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA"
new file mode 100644
index 0000000..c632040
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA"
@@ -0,0 +1,123 @@
+Metadata-Version: 2.3
+Name: aiohappyeyeballs
+Version: 2.6.1
+Summary: Happy Eyeballs for asyncio
+License: PSF-2.0
+Author: J. Nick Koston
+Author-email: nick@koston.org
+Requires-Python: >=3.9
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Project-URL: Bug Tracker, https://github.com/aio-libs/aiohappyeyeballs/issues
+Project-URL: Changelog, https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://aiohappyeyeballs.readthedocs.io
+Project-URL: Repository, https://github.com/aio-libs/aiohappyeyeballs
+Description-Content-Type: text/markdown
+
+# aiohappyeyeballs
+
+<p align="center">
+ <a href="https://github.com/aio-libs/aiohappyeyeballs/actions/workflows/ci.yml?query=branch%3Amain">
+ <img src="https://img.shields.io/github/actions/workflow/status/aio-libs/aiohappyeyeballs/ci-cd.yml?branch=main&label=CI&logo=github&style=flat-square" alt="CI Status" >
+ </a>
+ <a href="https://aiohappyeyeballs.readthedocs.io">
+ <img src="https://img.shields.io/readthedocs/aiohappyeyeballs.svg?logo=read-the-docs&logoColor=fff&style=flat-square" alt="Documentation Status">
+ </a>
+ <a href="https://codecov.io/gh/aio-libs/aiohappyeyeballs">
+ <img src="https://img.shields.io/codecov/c/github/aio-libs/aiohappyeyeballs.svg?logo=codecov&logoColor=fff&style=flat-square" alt="Test coverage percentage">
+ </a>
+</p>
+<p align="center">
+ <a href="https://python-poetry.org/">
+ <img src="https://img.shields.io/badge/packaging-poetry-299bd7?style=flat-square&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAASCAYAAABrXO8xAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAJJSURBVHgBfZLPa1NBEMe/s7tNXoxW1KJQKaUHkXhQvHgW6UHQQ09CBS/6V3hKc/AP8CqCrUcpmop3Cx48eDB4yEECjVQrlZb80CRN8t6OM/teagVxYZi38+Yz853dJbzoMV3MM8cJUcLMSUKIE8AzQ2PieZzFxEJOHMOgMQQ+dUgSAckNXhapU/NMhDSWLs1B24A8sO1xrN4NECkcAC9ASkiIJc6k5TRiUDPhnyMMdhKc+Zx19l6SgyeW76BEONY9exVQMzKExGKwwPsCzza7KGSSWRWEQhyEaDXp6ZHEr416ygbiKYOd7TEWvvcQIeusHYMJGhTwF9y7sGnSwaWyFAiyoxzqW0PM/RjghPxF2pWReAowTEXnDh0xgcLs8l2YQmOrj3N7ByiqEoH0cARs4u78WgAVkoEDIDoOi3AkcLOHU60RIg5wC4ZuTC7FaHKQm8Hq1fQuSOBvX/sodmNJSB5geaF5CPIkUeecdMxieoRO5jz9bheL6/tXjrwCyX/UYBUcjCaWHljx1xiX6z9xEjkYAzbGVnB8pvLmyXm9ep+W8CmsSHQQY77Zx1zboxAV0w7ybMhQmfqdmmw3nEp1I0Z+FGO6M8LZdoyZnuzzBdjISicKRnpxzI9fPb+0oYXsNdyi+d3h9bm9MWYHFtPeIZfLwzmFDKy1ai3p+PDls1Llz4yyFpferxjnyjJDSEy9CaCx5m2cJPerq6Xm34eTrZt3PqxYO1XOwDYZrFlH1fWnpU38Y9HRze3lj0vOujZcXKuuXm3jP+s3KbZVra7y2EAAAAAASUVORK5CYII=" alt="Poetry">
+ </a>
+ <a href="https://github.com/astral-sh/ruff">
+ <img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff">
+ </a>
+ <a href="https://github.com/pre-commit/pre-commit">
+ <img src="https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white&style=flat-square" alt="pre-commit">
+ </a>
+</p>
+<p align="center">
+ <a href="https://pypi.org/project/aiohappyeyeballs/">
+ <img src="https://img.shields.io/pypi/v/aiohappyeyeballs.svg?logo=python&logoColor=fff&style=flat-square" alt="PyPI Version">
+ </a>
+ <img src="https://img.shields.io/pypi/pyversions/aiohappyeyeballs.svg?style=flat-square&logo=python&logoColor=fff" alt="Supported Python versions">
+ <img src="https://img.shields.io/pypi/l/aiohappyeyeballs.svg?style=flat-square" alt="License">
+</p>
+
+---
+
+**Documentation**: <a href="https://aiohappyeyeballs.readthedocs.io" target="_blank">https://aiohappyeyeballs.readthedocs.io </a>
+
+**Source Code**: <a href="https://github.com/aio-libs/aiohappyeyeballs" target="_blank">https://github.com/aio-libs/aiohappyeyeballs </a>
+
+---
+
+[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
+([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))
+
+## Use case
+
+This library exists to allow connecting with
+[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
+([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))
+when you
+already have a list of addrinfo and not a DNS name.
+
+The stdlib version of `loop.create_connection()`
+will only work when you pass in an unresolved name which
+is not a good fit when using DNS caching or resolving
+names via another method such as `zeroconf`.
+
+## Installation
+
+Install this via pip (or your favourite package manager):
+
+`pip install aiohappyeyeballs`
+
+## License
+
+[aiohappyeyeballs is licensed under the same terms as cpython itself.](https://github.com/python/cpython/blob/main/LICENSE)
+
+## Example usage
+
+```python
+
+addr_infos = await loop.getaddrinfo("example.org", 80)
+
+socket = await start_connection(addr_infos)
+socket = await start_connection(addr_infos, local_addr_infos=local_addr_infos, happy_eyeballs_delay=0.2)
+
+transport, protocol = await loop.create_connection(
+ MyProtocol, sock=socket, ...)
+
+# Remove the first address for each family from addr_info
+pop_addr_infos_interleave(addr_info, 1)
+
+# Remove all matching address from addr_info
+remove_addr_infos(addr_info, "dead::beef::")
+
+# Convert a local_addr to local_addr_infos
+local_addr_infos = addr_to_addr_infos(("127.0.0.1",0))
+```
+
+## Credits
+
+This package contains code from cpython and is licensed under the same terms as cpython itself.
+
+This package was created with
+[Copier](https://copier.readthedocs.io/) and the
+[browniebroke/pypackage-template](https://github.com/browniebroke/pypackage-template)
+project template.
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD"
new file mode 100644
index 0000000..1b6cda7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD"
@@ -0,0 +1,16 @@
+aiohappyeyeballs-2.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiohappyeyeballs-2.6.1.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
+aiohappyeyeballs-2.6.1.dist-info/METADATA,sha256=NSXlhJwAfi380eEjAo7BQ4P_TVal9xi0qkyZWibMsVM,5915
+aiohappyeyeballs-2.6.1.dist-info/RECORD,,
+aiohappyeyeballs-2.6.1.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+aiohappyeyeballs/__init__.py,sha256=x7kktHEtaD9quBcWDJPuLeKyjuVAI-Jj14S9B_5hcTs,361
+aiohappyeyeballs/__pycache__/__init__.cpython-312.pyc,,
+aiohappyeyeballs/__pycache__/_staggered.cpython-312.pyc,,
+aiohappyeyeballs/__pycache__/impl.cpython-312.pyc,,
+aiohappyeyeballs/__pycache__/types.cpython-312.pyc,,
+aiohappyeyeballs/__pycache__/utils.cpython-312.pyc,,
+aiohappyeyeballs/_staggered.py,sha256=edfVowFx-P-ywJjIEF3MdPtEMVODujV6CeMYr65otac,6900
+aiohappyeyeballs/impl.py,sha256=Dlcm2mTJ28ucrGnxkb_fo9CZzLAkOOBizOt7dreBbXE,9681
+aiohappyeyeballs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiohappyeyeballs/types.py,sha256=YZJIAnyoV4Dz0WFtlaf_OyE4EW7Xus1z7aIfNI6tDDQ,425
+aiohappyeyeballs/utils.py,sha256=on9GxIR0LhEfZu8P6Twi9hepX9zDanuZM20MWsb3xlQ,3028
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL"
new file mode 100644
index 0000000..0582547
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL"
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: poetry-core 2.1.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/__init__.py"
new file mode 100644
index 0000000..71c689c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/__init__.py"
@@ -0,0 +1,14 @@
+__version__ = "2.6.1"
+
+from .impl import start_connection
+from .types import AddrInfoType, SocketFactoryType
+from .utils import addr_to_addr_infos, pop_addr_infos_interleave, remove_addr_infos
+
+__all__ = (
+ "AddrInfoType",
+ "SocketFactoryType",
+ "addr_to_addr_infos",
+ "pop_addr_infos_interleave",
+ "remove_addr_infos",
+ "start_connection",
+)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/_staggered.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/_staggered.py"
new file mode 100644
index 0000000..9a4ba72
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/_staggered.py"
@@ -0,0 +1,207 @@
+import asyncio
+import contextlib
+
+# PY3.9: Import Callable from typing until we drop Python 3.9 support
+# https://github.com/python/cpython/issues/87131
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+_T = TypeVar("_T")
+
+RE_RAISE_EXCEPTIONS = (SystemExit, KeyboardInterrupt)
+
+
+def _set_result(wait_next: "asyncio.Future[None]") -> None:
+ """Set the result of a future if it is not already done."""
+ if not wait_next.done():
+ wait_next.set_result(None)
+
+
+async def _wait_one(
+ futures: "Iterable[asyncio.Future[Any]]",
+ loop: asyncio.AbstractEventLoop,
+) -> _T:
+ """Wait for the first future to complete."""
+ wait_next = loop.create_future()
+
+ def _on_completion(fut: "asyncio.Future[Any]") -> None:
+ if not wait_next.done():
+ wait_next.set_result(fut)
+
+ for f in futures:
+ f.add_done_callback(_on_completion)
+
+ try:
+ return await wait_next
+ finally:
+ for f in futures:
+ f.remove_done_callback(_on_completion)
+
+
+async def staggered_race(
+ coro_fns: Iterable[Callable[[], Awaitable[_T]]],
+ delay: Optional[float],
+ *,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+) -> Tuple[Optional[_T], Optional[int], List[Optional[BaseException]]]:
+ """
+ Run coroutines with staggered start times and take the first to finish.
+
+ This method takes an iterable of coroutine functions. The first one is
+ started immediately. From then on, whenever the immediately preceding one
+ fails (raises an exception), or when *delay* seconds has passed, the next
+ coroutine is started. This continues until one of the coroutines complete
+ successfully, in which case all others are cancelled, or until all
+ coroutines fail.
+
+ The coroutines provided should be well-behaved in the following way:
+
+ * They should only ``return`` if completed successfully.
+
+ * They should always raise an exception if they did not complete
+ successfully. In particular, if they handle cancellation, they should
+ probably reraise, like this::
+
+ try:
+ # do work
+ except asyncio.CancelledError:
+ # undo partially completed work
+ raise
+
+ Args:
+ ----
+ coro_fns: an iterable of coroutine functions, i.e. callables that
+ return a coroutine object when called. Use ``functools.partial`` or
+ lambdas to pass arguments.
+
+ delay: amount of time, in seconds, between starting coroutines. If
+ ``None``, the coroutines will run sequentially.
+
+ loop: the event loop to use. If ``None``, the running loop is used.
+
+ Returns:
+ -------
+ tuple *(winner_result, winner_index, exceptions)* where
+
+ - *winner_result*: the result of the winning coroutine, or ``None``
+ if no coroutines won.
+
+ - *winner_index*: the index of the winning coroutine in
+ ``coro_fns``, or ``None`` if no coroutines won. If the winning
+ coroutine may return None on success, *winner_index* can be used
+ to definitively determine whether any coroutine won.
+
+ - *exceptions*: list of exceptions returned by the coroutines.
+ ``len(exceptions)`` is equal to the number of coroutines actually
+ started, and the order is the same as in ``coro_fns``. The winning
+ coroutine's entry is ``None``.
+
+ """
+ loop = loop or asyncio.get_running_loop()
+ exceptions: List[Optional[BaseException]] = []
+ tasks: Set[asyncio.Task[Optional[Tuple[_T, int]]]] = set()
+
+ async def run_one_coro(
+ coro_fn: Callable[[], Awaitable[_T]],
+ this_index: int,
+ start_next: "asyncio.Future[None]",
+ ) -> Optional[Tuple[_T, int]]:
+ """
+ Run a single coroutine.
+
+ If the coroutine fails, set the exception in the exceptions list and
+ start the next coroutine by setting the result of the start_next.
+
+ If the coroutine succeeds, return the result and the index of the
+ coroutine in the coro_fns list.
+
+ If SystemExit or KeyboardInterrupt is raised, re-raise it.
+ """
+ try:
+ result = await coro_fn()
+ except RE_RAISE_EXCEPTIONS:
+ raise
+ except BaseException as e:
+ exceptions[this_index] = e
+ _set_result(start_next) # Kickstart the next coroutine
+ return None
+
+ return result, this_index
+
+ start_next_timer: Optional[asyncio.TimerHandle] = None
+ start_next: Optional[asyncio.Future[None]]
+ task: asyncio.Task[Optional[Tuple[_T, int]]]
+ done: Union[asyncio.Future[None], asyncio.Task[Optional[Tuple[_T, int]]]]
+ coro_iter = iter(coro_fns)
+ this_index = -1
+ try:
+ while True:
+ if coro_fn := next(coro_iter, None):
+ this_index += 1
+ exceptions.append(None)
+ start_next = loop.create_future()
+ task = loop.create_task(run_one_coro(coro_fn, this_index, start_next))
+ tasks.add(task)
+ start_next_timer = (
+ loop.call_later(delay, _set_result, start_next) if delay else None
+ )
+ elif not tasks:
+ # We exhausted the coro_fns list and no tasks are running
+ # so we have no winner and all coroutines failed.
+ break
+
+ while tasks or start_next:
+ done = await _wait_one(
+ (*tasks, start_next) if start_next else tasks, loop
+ )
+ if done is start_next:
+ # The current task has failed or the timer has expired
+ # so we need to start the next task.
+ start_next = None
+ if start_next_timer:
+ start_next_timer.cancel()
+ start_next_timer = None
+
+ # Break out of the task waiting loop to start the next
+ # task.
+ break
+
+ if TYPE_CHECKING:
+ assert isinstance(done, asyncio.Task)
+
+ tasks.remove(done)
+ if winner := done.result():
+ return *winner, exceptions
+ finally:
+ # We either have:
+ # - a winner
+ # - all tasks failed
+ # - a KeyboardInterrupt or SystemExit.
+
+ #
+ # If the timer is still running, cancel it.
+ #
+ if start_next_timer:
+ start_next_timer.cancel()
+
+ #
+ # If there are any tasks left, cancel them and than
+ # wait them so they fill the exceptions list.
+ #
+ for task in tasks:
+ task.cancel()
+ with contextlib.suppress(asyncio.CancelledError):
+ await task
+
+ return None, None, exceptions
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/impl.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/impl.py"
new file mode 100644
index 0000000..8f3919a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/impl.py"
@@ -0,0 +1,259 @@
+"""Base implementation."""
+
+import asyncio
+import collections
+import contextlib
+import functools
+import itertools
+import socket
+from typing import List, Optional, Sequence, Set, Union
+
+from . import _staggered
+from .types import AddrInfoType, SocketFactoryType
+
+
+async def start_connection(
+ addr_infos: Sequence[AddrInfoType],
+ *,
+ local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
+ happy_eyeballs_delay: Optional[float] = None,
+ interleave: Optional[int] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ socket_factory: Optional[SocketFactoryType] = None,
+) -> socket.socket:
+ """
+ Connect to a TCP server.
+
+ Create a socket connection to a specified destination. The
+ destination is specified as a list of AddrInfoType tuples as
+ returned from getaddrinfo().
+
+ The arguments are, in order:
+
+ * ``family``: the address family, e.g. ``socket.AF_INET`` or
+ ``socket.AF_INET6``.
+ * ``type``: the socket type, e.g. ``socket.SOCK_STREAM`` or
+ ``socket.SOCK_DGRAM``.
+ * ``proto``: the protocol, e.g. ``socket.IPPROTO_TCP`` or
+ ``socket.IPPROTO_UDP``.
+ * ``canonname``: the canonical name of the address, e.g.
+ ``"www.python.org"``.
+ * ``sockaddr``: the socket address
+
+ This method is a coroutine which will try to establish the connection
+ in the background. When successful, the coroutine returns a
+ socket.
+
+ The expected use case is to use this method in conjunction with
+ loop.create_connection() to establish a connection to a server::
+
+ socket = await start_connection(addr_infos)
+ transport, protocol = await loop.create_connection(
+ MyProtocol, sock=socket, ...)
+ """
+ if not (current_loop := loop):
+ current_loop = asyncio.get_running_loop()
+
+ single_addr_info = len(addr_infos) == 1
+
+ if happy_eyeballs_delay is not None and interleave is None:
+ # If using happy eyeballs, default to interleave addresses by family
+ interleave = 1
+
+ if interleave and not single_addr_info:
+ addr_infos = _interleave_addrinfos(addr_infos, interleave)
+
+ sock: Optional[socket.socket] = None
+ # uvloop can raise RuntimeError instead of OSError
+ exceptions: List[List[Union[OSError, RuntimeError]]] = []
+ if happy_eyeballs_delay is None or single_addr_info:
+ # not using happy eyeballs
+ for addrinfo in addr_infos:
+ try:
+ sock = await _connect_sock(
+ current_loop,
+ exceptions,
+ addrinfo,
+ local_addr_infos,
+ None,
+ socket_factory,
+ )
+ break
+ except (RuntimeError, OSError):
+ continue
+ else: # using happy eyeballs
+ open_sockets: Set[socket.socket] = set()
+ try:
+ sock, _, _ = await _staggered.staggered_race(
+ (
+ functools.partial(
+ _connect_sock,
+ current_loop,
+ exceptions,
+ addrinfo,
+ local_addr_infos,
+ open_sockets,
+ socket_factory,
+ )
+ for addrinfo in addr_infos
+ ),
+ happy_eyeballs_delay,
+ )
+ finally:
+ # If we have a winner, staggered_race will
+ # cancel the other tasks, however there is a
+ # small race window where any of the other tasks
+ # can be done before they are cancelled which
+ # will leave the socket open. To avoid this problem
+ # we pass a set to _connect_sock to keep track of
+ # the open sockets and close them here if there
+ # are any "runner up" sockets.
+ for s in open_sockets:
+ if s is not sock:
+ with contextlib.suppress(OSError):
+ s.close()
+ open_sockets = None # type: ignore[assignment]
+
+ if sock is None:
+ all_exceptions = [exc for sub in exceptions for exc in sub]
+ try:
+ first_exception = all_exceptions[0]
+ if len(all_exceptions) == 1:
+ raise first_exception
+ else:
+ # If they all have the same str(), raise one.
+ model = str(first_exception)
+ if all(str(exc) == model for exc in all_exceptions):
+ raise first_exception
+ # Raise a combined exception so the user can see all
+ # the various error messages.
+ msg = "Multiple exceptions: {}".format(
+ ", ".join(str(exc) for exc in all_exceptions)
+ )
+ # If the errno is the same for all exceptions, raise
+ # an OSError with that errno.
+ if isinstance(first_exception, OSError):
+ first_errno = first_exception.errno
+ if all(
+ isinstance(exc, OSError) and exc.errno == first_errno
+ for exc in all_exceptions
+ ):
+ raise OSError(first_errno, msg)
+ elif isinstance(first_exception, RuntimeError) and all(
+ isinstance(exc, RuntimeError) for exc in all_exceptions
+ ):
+ raise RuntimeError(msg)
+ # We have a mix of OSError and RuntimeError
+ # so we have to pick which one to raise.
+ # and we raise OSError for compatibility
+ raise OSError(msg)
+ finally:
+ all_exceptions = None # type: ignore[assignment]
+ exceptions = None # type: ignore[assignment]
+
+ return sock
+
+
+async def _connect_sock(
+ loop: asyncio.AbstractEventLoop,
+ exceptions: List[List[Union[OSError, RuntimeError]]],
+ addr_info: AddrInfoType,
+ local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
+ open_sockets: Optional[Set[socket.socket]] = None,
+ socket_factory: Optional[SocketFactoryType] = None,
+) -> socket.socket:
+ """
+ Create, bind and connect one socket.
+
+ If open_sockets is passed, add the socket to the set of open sockets.
+ Any failure caught here will remove the socket from the set and close it.
+
+ Callers can use this set to close any sockets that are not the winner
+ of all staggered tasks in the result there are runner up sockets aka
+ multiple winners.
+ """
+ my_exceptions: List[Union[OSError, RuntimeError]] = []
+ exceptions.append(my_exceptions)
+ family, type_, proto, _, address = addr_info
+ sock = None
+ try:
+ if socket_factory is not None:
+ sock = socket_factory(addr_info)
+ else:
+ sock = socket.socket(family=family, type=type_, proto=proto)
+ if open_sockets is not None:
+ open_sockets.add(sock)
+ sock.setblocking(False)
+ if local_addr_infos is not None:
+ for lfamily, _, _, _, laddr in local_addr_infos:
+ # skip local addresses of different family
+ if lfamily != family:
+ continue
+ try:
+ sock.bind(laddr)
+ break
+ except OSError as exc:
+ msg = (
+ f"error while attempting to bind on "
+ f"address {laddr!r}: "
+ f"{(exc.strerror or '').lower()}"
+ )
+ exc = OSError(exc.errno, msg)
+ my_exceptions.append(exc)
+ else: # all bind attempts failed
+ if my_exceptions:
+ raise my_exceptions.pop()
+ else:
+ raise OSError(f"no matching local address with {family=} found")
+ await loop.sock_connect(sock, address)
+ return sock
+ except (RuntimeError, OSError) as exc:
+ my_exceptions.append(exc)
+ if sock is not None:
+ if open_sockets is not None:
+ open_sockets.remove(sock)
+ try:
+ sock.close()
+ except OSError as e:
+ my_exceptions.append(e)
+ raise
+ raise
+ except:
+ if sock is not None:
+ if open_sockets is not None:
+ open_sockets.remove(sock)
+ try:
+ sock.close()
+ except OSError as e:
+ my_exceptions.append(e)
+ raise
+ raise
+ finally:
+ exceptions = my_exceptions = None # type: ignore[assignment]
+
+
+def _interleave_addrinfos(
+ addrinfos: Sequence[AddrInfoType], first_address_family_count: int = 1
+) -> List[AddrInfoType]:
+ """Interleave list of addrinfo tuples by family."""
+ # Group addresses by family
+ addrinfos_by_family: collections.OrderedDict[int, List[AddrInfoType]] = (
+ collections.OrderedDict()
+ )
+ for addr in addrinfos:
+ family = addr[0]
+ if family not in addrinfos_by_family:
+ addrinfos_by_family[family] = []
+ addrinfos_by_family[family].append(addr)
+ addrinfos_lists = list(addrinfos_by_family.values())
+
+ reordered: List[AddrInfoType] = []
+ if first_address_family_count > 1:
+ reordered.extend(addrinfos_lists[0][: first_address_family_count - 1])
+ del addrinfos_lists[0][: first_address_family_count - 1]
+ reordered.extend(
+ a
+ for a in itertools.chain.from_iterable(itertools.zip_longest(*addrinfos_lists))
+ if a is not None
+ )
+ return reordered
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/py.typed"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/py.typed"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/types.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/types.py"
new file mode 100644
index 0000000..e8c7507
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/types.py"
@@ -0,0 +1,17 @@
+"""Types for aiohappyeyeballs."""
+
+import socket
+
+# PY3.9: Import Callable from typing until we drop Python 3.9 support
+# https://github.com/python/cpython/issues/87131
+from typing import Callable, Tuple, Union
+
+AddrInfoType = Tuple[
+ Union[int, socket.AddressFamily],
+ Union[int, socket.SocketKind],
+ int,
+ str,
+ Tuple, # type: ignore[type-arg]
+]
+
+SocketFactoryType = Callable[[AddrInfoType], socket.socket]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/utils.py"
new file mode 100644
index 0000000..ea29adb
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohappyeyeballs/utils.py"
@@ -0,0 +1,97 @@
+"""Utility functions for aiohappyeyeballs."""
+
+import ipaddress
+import socket
+from typing import Dict, List, Optional, Tuple, Union
+
+from .types import AddrInfoType
+
+
+def addr_to_addr_infos(
+ addr: Optional[
+ Union[Tuple[str, int, int, int], Tuple[str, int, int], Tuple[str, int]]
+ ],
+) -> Optional[List[AddrInfoType]]:
+ """Convert an address tuple to a list of addr_info tuples."""
+ if addr is None:
+ return None
+ host = addr[0]
+ port = addr[1]
+ is_ipv6 = ":" in host
+ if is_ipv6:
+ flowinfo = 0
+ scopeid = 0
+ addr_len = len(addr)
+ if addr_len >= 4:
+ scopeid = addr[3] # type: ignore[misc]
+ if addr_len >= 3:
+ flowinfo = addr[2] # type: ignore[misc]
+ addr = (host, port, flowinfo, scopeid)
+ family = socket.AF_INET6
+ else:
+ addr = (host, port)
+ family = socket.AF_INET
+ return [(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)]
+
+
+def pop_addr_infos_interleave(
+ addr_infos: List[AddrInfoType], interleave: Optional[int] = None
+) -> None:
+ """
+ Pop addr_info from the list of addr_infos by family up to interleave times.
+
+ The interleave parameter is used to know how many addr_infos for
+ each family should be popped of the top of the list.
+ """
+ seen: Dict[int, int] = {}
+ if interleave is None:
+ interleave = 1
+ to_remove: List[AddrInfoType] = []
+ for addr_info in addr_infos:
+ family = addr_info[0]
+ if family not in seen:
+ seen[family] = 0
+ if seen[family] < interleave:
+ to_remove.append(addr_info)
+ seen[family] += 1
+ for addr_info in to_remove:
+ addr_infos.remove(addr_info)
+
+
+def _addr_tuple_to_ip_address(
+ addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
+) -> Union[
+ Tuple[ipaddress.IPv4Address, int], Tuple[ipaddress.IPv6Address, int, int, int]
+]:
+ """Convert an address tuple to an IPv4Address."""
+ return (ipaddress.ip_address(addr[0]), *addr[1:])
+
+
+def remove_addr_infos(
+ addr_infos: List[AddrInfoType],
+ addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
+) -> None:
+ """
+ Remove an address from the list of addr_infos.
+
+ The addr value is typically the return value of
+ sock.getpeername().
+ """
+ bad_addrs_infos: List[AddrInfoType] = []
+ for addr_info in addr_infos:
+ if addr_info[-1] == addr:
+ bad_addrs_infos.append(addr_info)
+ if bad_addrs_infos:
+ for bad_addr_info in bad_addrs_infos:
+ addr_infos.remove(bad_addr_info)
+ return
+ # Slow path in case addr is formatted differently
+ match_addr = _addr_tuple_to_ip_address(addr)
+ for addr_info in addr_infos:
+ if match_addr == _addr_tuple_to_ip_address(addr_info[-1]):
+ bad_addrs_infos.append(addr_info)
+ if bad_addrs_infos:
+ for bad_addr_info in bad_addrs_infos:
+ addr_infos.remove(bad_addr_info)
+ return
+ raise ValueError(f"Address {addr} not found in addr_infos")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/METADATA"
new file mode 100644
index 0000000..078765d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/METADATA"
@@ -0,0 +1,262 @@
+Metadata-Version: 2.4
+Name: aiohttp
+Version: 3.13.3
+Summary: Async http client/server framework (asyncio)
+Maintainer-email: aiohttp team <team@aiohttp.org>
+License: Apache-2.0 AND MIT
+Project-URL: Homepage, https://github.com/aio-libs/aiohttp
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
+Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
+Project-URL: Docs: RTD, https://docs.aiohttp.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+License-File: vendor/llhttp/LICENSE
+Requires-Dist: aiohappyeyeballs>=2.5.0
+Requires-Dist: aiosignal>=1.4.0
+Requires-Dist: async-timeout<6.0,>=4.0; python_version < "3.11"
+Requires-Dist: attrs>=17.3.0
+Requires-Dist: frozenlist>=1.1.1
+Requires-Dist: multidict<7.0,>=4.5
+Requires-Dist: propcache>=0.2.0
+Requires-Dist: yarl<2.0,>=1.17.0
+Provides-Extra: speedups
+Requires-Dist: aiodns>=3.3.0; extra == "speedups"
+Requires-Dist: Brotli>=1.2; platform_python_implementation == "CPython" and extra == "speedups"
+Requires-Dist: brotlicffi>=1.2; platform_python_implementation != "CPython" and extra == "speedups"
+Requires-Dist: backports.zstd; (platform_python_implementation == "CPython" and python_version < "3.14") and extra == "speedups"
+Dynamic: license-file
+
+==================================
+Async http client/server framework
+==================================
+
+.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
+ :height: 64px
+ :width: 64px
+ :alt: aiohttp logo
+
+|
+
+.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+ :alt: GitHub Actions status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/aiohttp
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiohttp.svg
+ :target: https://pypi.org/project/aiohttp
+ :alt: Latest PyPI package version
+
+.. image:: https://img.shields.io/pypi/dm/aiohttp
+ :target: https://pypistats.org/packages/aiohttp
+ :alt: Downloads count
+
+.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
+ :target: https://docs.aiohttp.org/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json
+ :target: https://codspeed.io/aio-libs/aiohttp
+ :alt: Codspeed.io status for aiohttp
+
+
+Key Features
+============
+
+- Supports both client and server side of HTTP protocol.
+- Supports both client and server Web-Sockets out-of-the-box and avoids
+ Callback Hell.
+- Provides Web-server with middleware and pluggable routing.
+
+
+Getting started
+===============
+
+Client
+------
+
+To get something from the web:
+
+.. code-block:: python
+
+ import aiohttp
+ import asyncio
+
+ async def main():
+
+ async with aiohttp.ClientSession() as session:
+ async with session.get('http://python.org') as response:
+
+ print("Status:", response.status)
+ print("Content-type:", response.headers['content-type'])
+
+ html = await response.text()
+ print("Body:", html[:15], "...")
+
+ asyncio.run(main())
+
+This prints:
+
+.. code-block::
+
+ Status: 200
+ Content-type: text/html; charset=utf-8
+ Body: <!doctype html> ...
+
+Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
+
+Server
+------
+
+An example using a simple server:
+
+.. code-block:: python
+
+ # examples/server_simple.py
+ from aiohttp import web
+
+ async def handle(request):
+ name = request.match_info.get('name', "Anonymous")
+ text = "Hello, " + name
+ return web.Response(text=text)
+
+ async def wshandle(request):
+ ws = web.WebSocketResponse()
+ await ws.prepare(request)
+
+ async for msg in ws:
+ if msg.type == web.WSMsgType.text:
+ await ws.send_str("Hello, {}".format(msg.data))
+ elif msg.type == web.WSMsgType.binary:
+ await ws.send_bytes(msg.data)
+ elif msg.type == web.WSMsgType.close:
+ break
+
+ return ws
+
+
+ app = web.Application()
+ app.add_routes([web.get('/', handle),
+ web.get('/echo', wshandle),
+ web.get('/{name}', handle)])
+
+ if __name__ == '__main__':
+ web.run_app(app)
+
+
+Documentation
+=============
+
+https://aiohttp.readthedocs.io/
+
+
+Demos
+=====
+
+https://github.com/aio-libs/aiohttp-demos
+
+
+External links
+==============
+
+* `Third party libraries
+ <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
+* `Built with aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
+* `Powered by aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
+
+Feel free to make a Pull Request for adding your link to these pages!
+
+
+Communication channels
+======================
+
+*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions
+
+*Matrix*: `#aio-libs:matrix.org <https://matrix.to/#/#aio-libs:matrix.org>`_
+
+We support `Stack Overflow
+<https://stackoverflow.com/questions/tagged/aiohttp>`_.
+Please add *aiohttp* tag to your question there.
+
+Requirements
+============
+
+- attrs_
+- multidict_
+- yarl_
+- frozenlist_
+
+Optionally you may install the aiodns_ library (highly recommended for sake of speed).
+
+.. _aiodns: https://pypi.python.org/pypi/aiodns
+.. _attrs: https://github.com/python-attrs/attrs
+.. _multidict: https://pypi.python.org/pypi/multidict
+.. _frozenlist: https://pypi.org/project/frozenlist/
+.. _yarl: https://pypi.python.org/pypi/yarl
+.. _async-timeout: https://pypi.python.org/pypi/async_timeout
+
+License
+=======
+
+``aiohttp`` is offered under the Apache 2 license.
+
+
+Keepsafe
+========
+
+The aiohttp community would like to thank Keepsafe
+(https://www.getkeepsafe.com) for its support in the early days of
+the project.
+
+
+Source code
+===========
+
+The latest developer version is available in a GitHub repository:
+https://github.com/aio-libs/aiohttp
+
+Benchmarks
+==========
+
+If you are interested in efficiency, the AsyncIO community maintains a
+list of benchmarks on the official wiki:
+https://github.com/python/asyncio/wiki/Benchmarks
+
+--------
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+.. image:: https://insights.linuxfoundation.org/api/badge/health-score?project=aiohttp
+ :target: https://insights.linuxfoundation.org/project/aiohttp
+ :alt: LFX Health Score
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/RECORD"
new file mode 100644
index 0000000..7fbdeaa
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/RECORD"
@@ -0,0 +1,139 @@
+aiohttp-3.13.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiohttp-3.13.3.dist-info/METADATA,sha256=jkzui8KtHZ32gb8TfFZwIW4-zZ6Sr1eh1R6wYZW79Sg,8407
+aiohttp-3.13.3.dist-info/RECORD,,
+aiohttp-3.13.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiohttp-3.13.3.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101
+aiohttp-3.13.3.dist-info/licenses/LICENSE.txt,sha256=wUk-nxDVnR-6n53ygAjhVX4zz5-6yM4SY6ozk5goA94,601
+aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE,sha256=bd-mKNt20th7iWi6-61g9RxOyIEA3Xu5b5chbYivCAg,1127
+aiohttp-3.13.3.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
+aiohttp/.hash/_cparser.pxd.hash,sha256=eJQ2z7M7WoAng7D5ukCXzE3Yx22bLgv1PyOe0YbbQTM,108
+aiohttp/.hash/_find_header.pxd.hash,sha256=TxG5w4etbVd6sfm5JWbdf5PW6LnuXRQnlMoFBVGKN2E,112
+aiohttp/.hash/_http_parser.pyx.hash,sha256=NYbk_8ETW0vAtpTcxRVuWVmKJr9CUh2fR8I9emVQck4,112
+aiohttp/.hash/_http_writer.pyx.hash,sha256=J4W44iDZQwIyZ0rGO5v-_sKIfPtAwqn99EwgaevQmo8,112
+aiohttp/.hash/hdrs.py.hash,sha256=c2N-IMHz4dvAGL36CUyEw15noHE2AkJTeSBy3IxcCec,103
+aiohttp/__init__.py,sha256=wTWxnyVGn59VuoFuK1m2_jJ-Cw5Be9ktp7h5Hfvyaas,8580
+aiohttp/__pycache__/__init__.cpython-312.pyc,,
+aiohttp/__pycache__/_cookie_helpers.cpython-312.pyc,,
+aiohttp/__pycache__/abc.cpython-312.pyc,,
+aiohttp/__pycache__/base_protocol.cpython-312.pyc,,
+aiohttp/__pycache__/client.cpython-312.pyc,,
+aiohttp/__pycache__/client_exceptions.cpython-312.pyc,,
+aiohttp/__pycache__/client_middleware_digest_auth.cpython-312.pyc,,
+aiohttp/__pycache__/client_middlewares.cpython-312.pyc,,
+aiohttp/__pycache__/client_proto.cpython-312.pyc,,
+aiohttp/__pycache__/client_reqrep.cpython-312.pyc,,
+aiohttp/__pycache__/client_ws.cpython-312.pyc,,
+aiohttp/__pycache__/compression_utils.cpython-312.pyc,,
+aiohttp/__pycache__/connector.cpython-312.pyc,,
+aiohttp/__pycache__/cookiejar.cpython-312.pyc,,
+aiohttp/__pycache__/formdata.cpython-312.pyc,,
+aiohttp/__pycache__/hdrs.cpython-312.pyc,,
+aiohttp/__pycache__/helpers.cpython-312.pyc,,
+aiohttp/__pycache__/http.cpython-312.pyc,,
+aiohttp/__pycache__/http_exceptions.cpython-312.pyc,,
+aiohttp/__pycache__/http_parser.cpython-312.pyc,,
+aiohttp/__pycache__/http_websocket.cpython-312.pyc,,
+aiohttp/__pycache__/http_writer.cpython-312.pyc,,
+aiohttp/__pycache__/log.cpython-312.pyc,,
+aiohttp/__pycache__/multipart.cpython-312.pyc,,
+aiohttp/__pycache__/payload.cpython-312.pyc,,
+aiohttp/__pycache__/payload_streamer.cpython-312.pyc,,
+aiohttp/__pycache__/pytest_plugin.cpython-312.pyc,,
+aiohttp/__pycache__/resolver.cpython-312.pyc,,
+aiohttp/__pycache__/streams.cpython-312.pyc,,
+aiohttp/__pycache__/tcp_helpers.cpython-312.pyc,,
+aiohttp/__pycache__/test_utils.cpython-312.pyc,,
+aiohttp/__pycache__/tracing.cpython-312.pyc,,
+aiohttp/__pycache__/typedefs.cpython-312.pyc,,
+aiohttp/__pycache__/web.cpython-312.pyc,,
+aiohttp/__pycache__/web_app.cpython-312.pyc,,
+aiohttp/__pycache__/web_exceptions.cpython-312.pyc,,
+aiohttp/__pycache__/web_fileresponse.cpython-312.pyc,,
+aiohttp/__pycache__/web_log.cpython-312.pyc,,
+aiohttp/__pycache__/web_middlewares.cpython-312.pyc,,
+aiohttp/__pycache__/web_protocol.cpython-312.pyc,,
+aiohttp/__pycache__/web_request.cpython-312.pyc,,
+aiohttp/__pycache__/web_response.cpython-312.pyc,,
+aiohttp/__pycache__/web_routedef.cpython-312.pyc,,
+aiohttp/__pycache__/web_runner.cpython-312.pyc,,
+aiohttp/__pycache__/web_server.cpython-312.pyc,,
+aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc,,
+aiohttp/__pycache__/web_ws.cpython-312.pyc,,
+aiohttp/__pycache__/worker.cpython-312.pyc,,
+aiohttp/_cookie_helpers.py,sha256=x6tVKd6fgqjIFQzQ_z-t_CRl-Pnar7qJh8HUwroSKIA,13997
+aiohttp/_cparser.pxd,sha256=GP0Y9NqZYQGkJtS81XDzU70e7rRMb34TR7yGMmx5_zs,4453
+aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
+aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090
+aiohttp/_http_parser.cp312-win_amd64.pyd,sha256=kVErC3Q1vBoeaoCynkMwWayfaXk4Ju-VaWbOVdGcwB8,248832
+aiohttp/_http_parser.pyx,sha256=9-jyYF9-4i7ToMV0mvVgQ_rqNa8KGJfhQVY0GGrZuGg,29096
+aiohttp/_http_writer.cp312-win_amd64.pyd,sha256=e2t5uBtwmasH8kAxdg6QOvalydEl5-m3n46J4WSffiI,47104
+aiohttp/_http_writer.pyx,sha256=WWdOf19QPqScBkifDhJynqPPOAmwB9sKJAO0Kkor4tE,4826
+aiohttp/_websocket/.hash/mask.pxd.hash,sha256=TL0gGYyJWxqG8dWwa08B74WGg6-0M6_Breqrff-AiZg,115
+aiohttp/_websocket/.hash/mask.pyx.hash,sha256=7xo6f01JaOQmaUNij3dQlOgxkEC1edkAIhwpeOvimLI,115
+aiohttp/_websocket/.hash/reader_c.pxd.hash,sha256=RzhqjHN1HadWDeMHVQvaf-XLlGxF6nm5u-HJHGsx2aE,119
+aiohttp/_websocket/__init__.py,sha256=R51KWH5kkdtDLb7T-ilztksbfweKCy3t22SgxGtiY-4,45
+aiohttp/_websocket/__pycache__/__init__.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/helpers.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/models.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/reader.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/reader_c.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/reader_py.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/writer.cpython-312.pyc,,
+aiohttp/_websocket/helpers.py,sha256=amqvDhoAKAi8ptB4qUNuQhkaOn-4JxSh_VLAqytmEfw,5185
+aiohttp/_websocket/mask.cp312-win_amd64.pyd,sha256=Q7mH9VajqPagYj6NGCurPmwJWcMZU07zN4FEkfUAP_c,36864
+aiohttp/_websocket/mask.pxd,sha256=41TdSZvhcbYSW_Vrw7bF4r_yoor2njtdaZ3bmvK6-jw,115
+aiohttp/_websocket/mask.pyx,sha256=Ro7dOOv43HAAqNMz3xyCA11ppcn-vARIvjycStTEYww,1445
+aiohttp/_websocket/models.py,sha256=Pz8qvnU43VUCNZcY4g03VwTsHOsb_jSN8iG69xMAc_A,2205
+aiohttp/_websocket/reader.py,sha256=1r0cJ-jdFgbSrC6-jI0zjEA1CppzoUn8u_wiebrVVO0,1061
+aiohttp/_websocket/reader_c.cp312-win_amd64.pyd,sha256=2gSIJBH5w8xkfbErzqeI_MTILdr4gR4Pc4ytNj_jaD0,147968
+aiohttp/_websocket/reader_c.pxd,sha256=HNOl4gRWtNBNEYNbK9PGOfFEQwUqJGexBbDKB_20sl0,2735
+aiohttp/_websocket/reader_c.py,sha256=UKfslJuANla_CQMe7yIJzE8vp7bpzz9TLr-lH87XW6U,19346
+aiohttp/_websocket/reader_py.py,sha256=UKfslJuANla_CQMe7yIJzE8vp7bpzz9TLr-lH87XW6U,19346
+aiohttp/_websocket/writer.py,sha256=MpuNvG_t34CaDTAzW5FZJaRME8sL19rZotxSbXz2aas,11523
+aiohttp/abc.py,sha256=01N6Y63o2bBC8Vi0ZjO6Jw0V9kXZfy3egwzKFW-tv9c,7417
+aiohttp/base_protocol.py,sha256=8vNIv6QV_SDCW-8tfhlyxSwiBD7dAiMTqJI1GI8RG5s,3125
+aiohttp/client.py,sha256=KlWhIZt935YpOZcXOOZl3eIRkuO-l0z2BH7arfhGg-A,59992
+aiohttp/client_exceptions.py,sha256=sJcuvYKaB2nwuSdP7k18y3wc74aU0xAzdJikzzesrPE,11788
+aiohttp/client_middleware_digest_auth.py,sha256=K4TPt4-rPQ0jjSHx3UFguMN7n31LpCC_o6JA-Hrg_Pc,18107
+aiohttp/client_middlewares.py,sha256=FEVIXFkQ58n5bhK4BGEqqDCWnDh-GNJmWq20I5Yt6SU,1973
+aiohttp/client_proto.py,sha256=rfbg8nUsfpCMM_zGpQygiFn8nzSdBI-731rmXVGHwLc,12469
+aiohttp/client_reqrep.py,sha256=BUrqo2BJbrNazrIJr-ZgMLRTvE2fSON3zPQSq1dfgfU,54927
+aiohttp/client_ws.py,sha256=9DraHuupuJcT7NOgyeGml8SBr7V5D5ID5-piY1fQMdA,15537
+aiohttp/compression_utils.py,sha256=w0ECGGLVjtCXdYg-U_9DBn-DASzDPaWEVRx1HlwWslk,12086
+aiohttp/connector.py,sha256=X2sRe6EAeWiaP6eaK9hWvLtSbdiJfNhK3bWl7XbR_V4,70846
+aiohttp/cookiejar.py,sha256=C2fVzQGFieFP9mFDTOvfEc6fb5kPS2ijL2tFKAUW7Sw,19444
+aiohttp/formdata.py,sha256=sz3VaTHVk11z_5G1LaDhUwrONJ8zRAGlZGg3hcCApzA,6563
+aiohttp/hdrs.py,sha256=7htmhgZyE9HqWbPpxHU0r7kAIdT2kpOXQa1AadDh2W8,5232
+aiohttp/helpers.py,sha256=1tXIvGSRWJD9wsS7GUVHLfJEsDM_XigurpgjxajkH0g,31615
+aiohttp/http.py,sha256=DGKcwDbgIMpasv7s2jeKCRuixyj7W-RIrihRFjj0xcY,1914
+aiohttp/http_exceptions.py,sha256=J3v-1S9S22GfAEtx0pEqp6d4G1Lqi2-gOrdLtuGlEhY,3185
+aiohttp/http_parser.py,sha256=O5ud4wO80WLFe9kpXU0xGhjczUfrb7BAr0XAP7rBn7E,39263
+aiohttp/http_websocket.py,sha256=b9kBmxPLPFQP_nu_sMhIMIeqDOm0ug8G4prbrhEMHZ0,878
+aiohttp/http_writer.py,sha256=jA_aJW7JdH1mihrIYdJcLOHVKQ4Agg3g993v50eITBs,12824
+aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333
+aiohttp/multipart.py,sha256=UvcLOX3lO3ad3nfODsdlyvYWMAZHdUZ-wlZ5w1TbD2E,41634
+aiohttp/payload.py,sha256=Xbs_2l0wDaThFG-ehNlvzQUkHuBPpc5FxpJnJa3ZPcs,41994
+aiohttp/payload_streamer.py,sha256=K0iV85iW0vEG3rDkcopruidspynzQvrwW8mJvgPHisg,2289
+aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8
+aiohttp/pytest_plugin.py,sha256=ymhjbYHz2Kf0ZU_4Ly0hAp73dhsgrQIzJDo4Aot3_TI,13345
+aiohttp/resolver.py,sha256=ePJgZAN5EQY4YuFiuZmVZM6p3UuzJ4qMWM1fu8DJ2Fc,10305
+aiohttp/streams.py,sha256=J0G4ZJPdRScOPtnaB1ixhQYjLunLk8z70mfN9bc5K_o,24424
+aiohttp/tcp_helpers.py,sha256=K-hhGh3jd6qCEnHJo8LvFyfJwBjh99UKI7A0aSRVhj4,998
+aiohttp/test_utils.py,sha256=zFWAb-rPz1fWRUHnrjnfUH7ORlfIgZ2UZbEGe4YTa9I,23790
+aiohttp/tracing.py,sha256=Kb-N32aMmYqC2Yc82NV6l0mIcavSQst1BHSFj94Apl0,15013
+aiohttp/typedefs.py,sha256=Sx5v2yUyLu8nbabqtJRWj1M1_uW0IZACu78uYD7LBy0,1726
+aiohttp/web.py,sha256=BQ96NEuTWikKGN5NnnTHjFLt07GUMWvvn42iFuIS3Mg,18444
+aiohttp/web_app.py,sha256=WwEEzUg34j81kK2dPFnhlqx_z6nGjnHZDweZJF65pKc,20072
+aiohttp/web_exceptions.py,sha256=itNRhCMDJFhnMWftr5SyTsoqh-i0n9rzTj0sjcAEUjo,10812
+aiohttp/web_fileresponse.py,sha256=QIIbcIruCgfYrc8ZDvOgNlZzLbAagwXA9FrNI7NKNPY,16780
+aiohttp/web_log.py,sha256=G5ugloW9noUxPft0SmVWOXw30MviL6rqZc3XrKN_T1U,8081
+aiohttp/web_middlewares.py,sha256=mM2-R8eaV2r6Mi9Zc2bDG8QnhE9h0IzPvtDX_fkKR5s,4286
+aiohttp/web_protocol.py,sha256=gJaDFtYPA-1gz35fwchjLhxrkmXXMOzFMCDHLQ1FHiI,27802
+aiohttp/web_request.py,sha256=9zqyP32ScMUylQ_ta4tBHpWmoprhSB4jTgj2ixmGK74,30763
+aiohttp/web_response.py,sha256=WJVumt-P0uMaFSbef_owvOXpq90E4VMl3RvSOWh0nJE,30197
+aiohttp/web_routedef.py,sha256=XC10f57Q36JmYaaQqrecsyfIxHMepCKaKkBEB7hLzJI,6324
+aiohttp/web_runner.py,sha256=zyVYVzCgnopiGwnIhKlNZHtLV_IYQ9aC-Vm43j_HRoA,12185
+aiohttp/web_server.py,sha256=RZSWt_Mj-Lu89bFYsr_T3rjxW2VNN7PHNJ2mvv2qELs,2972
+aiohttp/web_urldispatcher.py,sha256=4FiNFUWU_jITYl_DnObptuF5c0ShXAEiWyLVmE-GtN0,45595
+aiohttp/web_ws.py,sha256=VXHGDtfy_jrBByLvuhnL-A_PmpcoT_ZLyYdj_EcL3Hw,23370
+aiohttp/worker.py,sha256=N_9iyS_tR9U0pf3BRaIH2nzA1pjN1Xfi2gGmRrMhnho,8407
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/REQUESTED" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/REQUESTED"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/REQUESTED"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/WHEEL"
new file mode 100644
index 0000000..10ac2c2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/LICENSE.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/LICENSE.txt"
new file mode 100644
index 0000000..e497a32
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/LICENSE.txt"
@@ -0,0 +1,13 @@
+ Copyright aio-libs contributors.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE"
new file mode 100644
index 0000000..6c1512d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/licenses/vendor/llhttp/LICENSE"
@@ -0,0 +1,22 @@
+This software is licensed under the MIT License.
+
+Copyright Fedor Indutny, 2018.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to permit
+persons to whom the Software is furnished to do so, subject to the
+following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/top_level.txt"
new file mode 100644
index 0000000..ee4ba4f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp-3.13.3.dist-info/top_level.txt"
@@ -0,0 +1 @@
+aiohttp
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash"
new file mode 100644
index 0000000..5322009
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash"
@@ -0,0 +1 @@
+18fd18f4da996101a426d4bcd570f353bd1eeeb44c6f7e1347bc86326c79ff3b *D:/a/aiohttp/aiohttp/aiohttp/_cparser.pxd
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash"
new file mode 100644
index 0000000..8af9f81
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash"
@@ -0,0 +1 @@
+0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73 *D:/a/aiohttp/aiohttp/aiohttp/_find_header.pxd
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash"
new file mode 100644
index 0000000..690de3e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash"
@@ -0,0 +1 @@
+f7e8f2605f7ee22ed3a0c5749af56043faea35af0a1897e1415634186ad9b868 *D:/a/aiohttp/aiohttp/aiohttp/_http_parser.pyx
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash"
new file mode 100644
index 0000000..5e3dbb4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash"
@@ -0,0 +1 @@
+59674e7f5f503ea49c06489f0e12729ea3cf3809b007db0a2403b42a4a2be2d1 *D:/a/aiohttp/aiohttp/aiohttp/_http_writer.pyx
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/hdrs.py.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/hdrs.py.hash"
new file mode 100644
index 0000000..e4f3c29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/.hash/hdrs.py.hash"
@@ -0,0 +1 @@
+ee1b6686067213d1ea59b3e9c47534afb90021d4f692939741ad4069d0e1d96f *D:/a/aiohttp/aiohttp/aiohttp/hdrs.py
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/__init__.py"
new file mode 100644
index 0000000..357baf0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/__init__.py"
@@ -0,0 +1,278 @@
+__version__ = "3.13.3"
+
+from typing import TYPE_CHECKING, Tuple
+
+from . import hdrs as hdrs
+from .client import (
+ BaseConnector,
+ ClientConnectionError,
+ ClientConnectionResetError,
+ ClientConnectorCertificateError,
+ ClientConnectorDNSError,
+ ClientConnectorError,
+ ClientConnectorSSLError,
+ ClientError,
+ ClientHttpProxyError,
+ ClientOSError,
+ ClientPayloadError,
+ ClientProxyConnectionError,
+ ClientRequest,
+ ClientResponse,
+ ClientResponseError,
+ ClientSession,
+ ClientSSLError,
+ ClientTimeout,
+ ClientWebSocketResponse,
+ ClientWSTimeout,
+ ConnectionTimeoutError,
+ ContentTypeError,
+ Fingerprint,
+ InvalidURL,
+ InvalidUrlClientError,
+ InvalidUrlRedirectClientError,
+ NamedPipeConnector,
+ NonHttpUrlClientError,
+ NonHttpUrlRedirectClientError,
+ RedirectClientError,
+ RequestInfo,
+ ServerConnectionError,
+ ServerDisconnectedError,
+ ServerFingerprintMismatch,
+ ServerTimeoutError,
+ SocketTimeoutError,
+ TCPConnector,
+ TooManyRedirects,
+ UnixConnector,
+ WSMessageTypeError,
+ WSServerHandshakeError,
+ request,
+)
+from .client_middleware_digest_auth import DigestAuthMiddleware
+from .client_middlewares import ClientHandlerType, ClientMiddlewareType
+from .compression_utils import set_zlib_backend
+from .connector import (
+ AddrInfoType as AddrInfoType,
+ SocketFactoryType as SocketFactoryType,
+)
+from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
+from .formdata import FormData as FormData
+from .helpers import BasicAuth, ChainMapProxy, ETag
+from .http import (
+ HttpVersion as HttpVersion,
+ HttpVersion10 as HttpVersion10,
+ HttpVersion11 as HttpVersion11,
+ WebSocketError as WebSocketError,
+ WSCloseCode as WSCloseCode,
+ WSMessage as WSMessage,
+ WSMsgType as WSMsgType,
+)
+from .multipart import (
+ BadContentDispositionHeader as BadContentDispositionHeader,
+ BadContentDispositionParam as BadContentDispositionParam,
+ BodyPartReader as BodyPartReader,
+ MultipartReader as MultipartReader,
+ MultipartWriter as MultipartWriter,
+ content_disposition_filename as content_disposition_filename,
+ parse_content_disposition as parse_content_disposition,
+)
+from .payload import (
+ PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
+ AsyncIterablePayload as AsyncIterablePayload,
+ BufferedReaderPayload as BufferedReaderPayload,
+ BytesIOPayload as BytesIOPayload,
+ BytesPayload as BytesPayload,
+ IOBasePayload as IOBasePayload,
+ JsonPayload as JsonPayload,
+ Payload as Payload,
+ StringIOPayload as StringIOPayload,
+ StringPayload as StringPayload,
+ TextIOPayload as TextIOPayload,
+ get_payload as get_payload,
+ payload_type as payload_type,
+)
+from .payload_streamer import streamer as streamer
+from .resolver import (
+ AsyncResolver as AsyncResolver,
+ DefaultResolver as DefaultResolver,
+ ThreadedResolver as ThreadedResolver,
+)
+from .streams import (
+ EMPTY_PAYLOAD as EMPTY_PAYLOAD,
+ DataQueue as DataQueue,
+ EofStream as EofStream,
+ FlowControlDataQueue as FlowControlDataQueue,
+ StreamReader as StreamReader,
+)
+from .tracing import (
+ TraceConfig as TraceConfig,
+ TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
+ TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
+ TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
+ TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
+ TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
+ TraceDnsCacheHitParams as TraceDnsCacheHitParams,
+ TraceDnsCacheMissParams as TraceDnsCacheMissParams,
+ TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
+ TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
+ TraceRequestChunkSentParams as TraceRequestChunkSentParams,
+ TraceRequestEndParams as TraceRequestEndParams,
+ TraceRequestExceptionParams as TraceRequestExceptionParams,
+ TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,
+ TraceRequestRedirectParams as TraceRequestRedirectParams,
+ TraceRequestStartParams as TraceRequestStartParams,
+ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
+)
+
+if TYPE_CHECKING:
+ # At runtime these are lazy-loaded at the bottom of the file.
+ from .worker import (
+ GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
+ GunicornWebWorker as GunicornWebWorker,
+ )
+
+__all__: Tuple[str, ...] = (
+ "hdrs",
+ # client
+ "AddrInfoType",
+ "BaseConnector",
+ "ClientConnectionError",
+ "ClientConnectionResetError",
+ "ClientConnectorCertificateError",
+ "ClientConnectorDNSError",
+ "ClientConnectorError",
+ "ClientConnectorSSLError",
+ "ClientError",
+ "ClientHttpProxyError",
+ "ClientOSError",
+ "ClientPayloadError",
+ "ClientProxyConnectionError",
+ "ClientResponse",
+ "ClientRequest",
+ "ClientResponseError",
+ "ClientSSLError",
+ "ClientSession",
+ "ClientTimeout",
+ "ClientWebSocketResponse",
+ "ClientWSTimeout",
+ "ConnectionTimeoutError",
+ "ContentTypeError",
+ "Fingerprint",
+ "FlowControlDataQueue",
+ "InvalidURL",
+ "InvalidUrlClientError",
+ "InvalidUrlRedirectClientError",
+ "NonHttpUrlClientError",
+ "NonHttpUrlRedirectClientError",
+ "RedirectClientError",
+ "RequestInfo",
+ "ServerConnectionError",
+ "ServerDisconnectedError",
+ "ServerFingerprintMismatch",
+ "ServerTimeoutError",
+ "SocketFactoryType",
+ "SocketTimeoutError",
+ "TCPConnector",
+ "TooManyRedirects",
+ "UnixConnector",
+ "NamedPipeConnector",
+ "WSServerHandshakeError",
+ "request",
+ # client_middleware
+ "ClientMiddlewareType",
+ "ClientHandlerType",
+ # cookiejar
+ "CookieJar",
+ "DummyCookieJar",
+ # formdata
+ "FormData",
+ # helpers
+ "BasicAuth",
+ "ChainMapProxy",
+ "DigestAuthMiddleware",
+ "ETag",
+ "set_zlib_backend",
+ # http
+ "HttpVersion",
+ "HttpVersion10",
+ "HttpVersion11",
+ "WSMsgType",
+ "WSCloseCode",
+ "WSMessage",
+ "WebSocketError",
+ # multipart
+ "BadContentDispositionHeader",
+ "BadContentDispositionParam",
+ "BodyPartReader",
+ "MultipartReader",
+ "MultipartWriter",
+ "content_disposition_filename",
+ "parse_content_disposition",
+ # payload
+ "AsyncIterablePayload",
+ "BufferedReaderPayload",
+ "BytesIOPayload",
+ "BytesPayload",
+ "IOBasePayload",
+ "JsonPayload",
+ "PAYLOAD_REGISTRY",
+ "Payload",
+ "StringIOPayload",
+ "StringPayload",
+ "TextIOPayload",
+ "get_payload",
+ "payload_type",
+ # payload_streamer
+ "streamer",
+ # resolver
+ "AsyncResolver",
+ "DefaultResolver",
+ "ThreadedResolver",
+ # streams
+ "DataQueue",
+ "EMPTY_PAYLOAD",
+ "EofStream",
+ "StreamReader",
+ # tracing
+ "TraceConfig",
+ "TraceConnectionCreateEndParams",
+ "TraceConnectionCreateStartParams",
+ "TraceConnectionQueuedEndParams",
+ "TraceConnectionQueuedStartParams",
+ "TraceConnectionReuseconnParams",
+ "TraceDnsCacheHitParams",
+ "TraceDnsCacheMissParams",
+ "TraceDnsResolveHostEndParams",
+ "TraceDnsResolveHostStartParams",
+ "TraceRequestChunkSentParams",
+ "TraceRequestEndParams",
+ "TraceRequestExceptionParams",
+ "TraceRequestHeadersSentParams",
+ "TraceRequestRedirectParams",
+ "TraceRequestStartParams",
+ "TraceResponseChunkReceivedParams",
+ # workers (imported lazily with __getattr__)
+ "GunicornUVLoopWebWorker",
+ "GunicornWebWorker",
+ "WSMessageTypeError",
+)
+
+
+def __dir__() -> Tuple[str, ...]:
+ return __all__ + ("__doc__",)
+
+
+def __getattr__(name: str) -> object:
+ global GunicornUVLoopWebWorker, GunicornWebWorker
+
+ # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
+ if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
+ try:
+ from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
+ except ImportError:
+ return None
+
+ GunicornUVLoopWebWorker = guv # type: ignore[misc]
+ GunicornWebWorker = gw # type: ignore[misc]
+ return guv if name == "GunicornUVLoopWebWorker" else gw
+
+ raise AttributeError(f"module {__name__} has no attribute {name}")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_cookie_helpers.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_cookie_helpers.py"
new file mode 100644
index 0000000..10e2e0e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_cookie_helpers.py"
@@ -0,0 +1,338 @@
+"""
+Internal cookie handling helpers.
+
+This module contains internal utilities for cookie parsing and manipulation.
+These are not part of the public API and may change without notice.
+"""
+
+import re
+from http.cookies import Morsel
+from typing import List, Optional, Sequence, Tuple, cast
+
+from .log import internal_logger
+
+__all__ = (
+ "parse_set_cookie_headers",
+ "parse_cookie_header",
+ "preserve_morsel_with_coded_value",
+)
+
+# Cookie parsing constants
+# Allow more characters in cookie names to handle real-world cookies
+# that don't strictly follow RFC standards (fixes #2683)
+# RFC 6265 defines cookie-name token as per RFC 2616 Section 2.2,
+# but many servers send cookies with characters like {} [] () etc.
+# This makes the cookie parser more tolerant of real-world cookies
+# while still providing some validation to catch obviously malformed names.
+_COOKIE_NAME_RE = re.compile(r"^[!#$%&\'()*+\-./0-9:<=>?@A-Z\[\]^_`a-z{|}~]+$")
+_COOKIE_KNOWN_ATTRS = frozenset( # AKA Morsel._reserved
+ (
+ "path",
+ "domain",
+ "max-age",
+ "expires",
+ "secure",
+ "httponly",
+ "samesite",
+ "partitioned",
+ "version",
+ "comment",
+ )
+)
+_COOKIE_BOOL_ATTRS = frozenset( # AKA Morsel._flags
+ ("secure", "httponly", "partitioned")
+)
+
+# SimpleCookie's pattern for parsing cookies with relaxed validation
+# Based on http.cookies pattern but extended to allow more characters in cookie names
+# to handle real-world cookies (fixes #2683)
+_COOKIE_PATTERN = re.compile(
+ r"""
+ \s* # Optional whitespace at start of cookie
+ (?P<key> # Start of group 'key'
+ # aiohttp has extended to include [] for compatibility with real-world cookies
+ [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\[\]]+ # Any word of at least one letter
+ ) # End of group 'key'
+ ( # Optional group: there may not be a value.
+ \s*=\s* # Equal Sign
+ (?P<val> # Start of group 'val'
+ "(?:[^\\"]|\\.)*" # Any double-quoted string (properly closed)
+ | # or
+ "[^";]* # Unmatched opening quote (differs from SimpleCookie - issue #7993)
+ | # or
+ # Special case for "expires" attr - RFC 822, RFC 850, RFC 1036, RFC 1123
+ (\w{3,6}day|\w{3}),\s # Day of the week or abbreviated day (with comma)
+ [\w\d\s-]{9,11}\s[\d:]{8}\s # Date and time in specific format
+ (GMT|[+-]\d{4}) # Timezone: GMT or RFC 2822 offset like -0000, +0100
+ # NOTE: RFC 2822 timezone support is an aiohttp extension
+ # for issue #4493 - SimpleCookie does NOT support this
+ | # or
+ # ANSI C asctime() format: "Wed Jun 9 10:18:14 2021"
+ # NOTE: This is an aiohttp extension for issue #4327 - SimpleCookie does NOT support this format
+ \w{3}\s+\w{3}\s+[\s\d]\d\s+\d{2}:\d{2}:\d{2}\s+\d{4}
+ | # or
+ [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]* # Any word or empty string
+ ) # End of group 'val'
+ )? # End of optional value group
+ \s* # Any number of spaces.
+ (\s+|;|$) # Ending either at space, semicolon, or EOS.
+ """,
+ re.VERBOSE | re.ASCII,
+)
+
+
+def preserve_morsel_with_coded_value(cookie: Morsel[str]) -> Morsel[str]:
+ """
+ Preserve a Morsel's coded_value exactly as received from the server.
+
+ This function ensures that cookie encoding is preserved exactly as sent by
+ the server, which is critical for compatibility with old servers that have
+ strict requirements about cookie formats.
+
+ This addresses the issue described in https://github.com/aio-libs/aiohttp/pull/1453
+ where Python's SimpleCookie would re-encode cookies, breaking authentication
+ with certain servers.
+
+ Args:
+ cookie: A Morsel object from SimpleCookie
+
+ Returns:
+ A Morsel object with preserved coded_value
+
+ """
+ mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
+ # We use __setstate__ instead of the public set() API because it allows us to
+ # bypass validation and set already validated state. This is more stable than
+ # setting protected attributes directly and unlikely to change since it would
+ # break pickling.
+ mrsl_val.__setstate__( # type: ignore[attr-defined]
+ {"key": cookie.key, "value": cookie.value, "coded_value": cookie.coded_value}
+ )
+ return mrsl_val
+
+
+_unquote_sub = re.compile(r"\\(?:([0-3][0-7][0-7])|(.))").sub
+
+
+def _unquote_replace(m: re.Match[str]) -> str:
+ """
+ Replace function for _unquote_sub regex substitution.
+
+ Handles escaped characters in cookie values:
+ - Octal sequences are converted to their character representation
+ - Other escaped characters are unescaped by removing the backslash
+ """
+ if m[1]:
+ return chr(int(m[1], 8))
+ return m[2]
+
+
+def _unquote(value: str) -> str:
+ """
+ Unquote a cookie value.
+
+ Vendored from http.cookies._unquote to ensure compatibility.
+
+ Note: The original implementation checked for None, but we've removed
+ that check since all callers already ensure the value is not None.
+ """
+ # If there aren't any doublequotes,
+ # then there can't be any special characters. See RFC 2109.
+ if len(value) < 2:
+ return value
+ if value[0] != '"' or value[-1] != '"':
+ return value
+
+ # We have to assume that we must decode this string.
+ # Down to work.
+
+ # Remove the "s
+ value = value[1:-1]
+
+ # Check for special sequences. Examples:
+ # \012 --> \n
+ # \" --> "
+ #
+ return _unquote_sub(_unquote_replace, value)
+
+
+def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]:
+ """
+ Parse a Cookie header according to RFC 6265 Section 5.4.
+
+ Cookie headers contain only name-value pairs separated by semicolons.
+ There are no attributes in Cookie headers - even names that match
+ attribute names (like 'path' or 'secure') should be treated as cookies.
+
+ This parser uses the same regex-based approach as parse_set_cookie_headers
+ to properly handle quoted values that may contain semicolons. When the
+ regex fails to match a malformed cookie, it falls back to simple parsing
+ to ensure subsequent cookies are not lost
+ https://github.com/aio-libs/aiohttp/issues/11632
+
+ Args:
+ header: The Cookie header value to parse
+
+ Returns:
+ List of (name, Morsel) tuples for compatibility with SimpleCookie.update()
+ """
+ if not header:
+ return []
+
+ cookies: List[Tuple[str, Morsel[str]]] = []
+ morsel: Morsel[str]
+ i = 0
+ n = len(header)
+
+ invalid_names = []
+ while i < n:
+ # Use the same pattern as parse_set_cookie_headers to find cookies
+ match = _COOKIE_PATTERN.match(header, i)
+ if not match:
+ # Fallback for malformed cookies https://github.com/aio-libs/aiohttp/issues/11632
+ # Find next semicolon to skip or attempt simple key=value parsing
+ next_semi = header.find(";", i)
+ eq_pos = header.find("=", i)
+
+ # Try to extract key=value if '=' comes before ';'
+ if eq_pos != -1 and (next_semi == -1 or eq_pos < next_semi):
+ end_pos = next_semi if next_semi != -1 else n
+ key = header[i:eq_pos].strip()
+ value = header[eq_pos + 1 : end_pos].strip()
+
+ # Validate the name (same as regex path)
+ if not _COOKIE_NAME_RE.match(key):
+ invalid_names.append(key)
+ else:
+ morsel = Morsel()
+ morsel.__setstate__( # type: ignore[attr-defined]
+ {"key": key, "value": _unquote(value), "coded_value": value}
+ )
+ cookies.append((key, morsel))
+
+ # Move to next cookie or end
+ i = next_semi + 1 if next_semi != -1 else n
+ continue
+
+ key = match.group("key")
+ value = match.group("val") or ""
+ i = match.end(0)
+
+ # Validate the name
+ if not key or not _COOKIE_NAME_RE.match(key):
+ invalid_names.append(key)
+ continue
+
+ # Create new morsel
+ morsel = Morsel()
+ # Preserve the original value as coded_value (with quotes if present)
+ # We use __setstate__ instead of the public set() API because it allows us to
+ # bypass validation and set already validated state. This is more stable than
+ # setting protected attributes directly and unlikely to change since it would
+ # break pickling.
+ morsel.__setstate__( # type: ignore[attr-defined]
+ {"key": key, "value": _unquote(value), "coded_value": value}
+ )
+
+ cookies.append((key, morsel))
+
+ if invalid_names:
+ internal_logger.debug(
+ "Cannot load cookie. Illegal cookie names: %r", invalid_names
+ )
+
+ return cookies
+
+
+def parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]:
+ """
+ Parse cookie headers using a vendored version of SimpleCookie parsing.
+
+ This implementation is based on SimpleCookie.__parse_string to ensure
+ compatibility with how SimpleCookie parses cookies, including handling
+ of malformed cookies with missing semicolons.
+
+ This function is used for both Cookie and Set-Cookie headers in order to be
+ forgiving. Ideally we would have followed RFC 6265 Section 5.2 (for Cookie
+ headers) and RFC 6265 Section 4.2.1 (for Set-Cookie headers), but the
+ real world data makes it impossible since we need to be a bit more forgiving.
+
+ NOTE: This implementation differs from SimpleCookie in handling unmatched quotes.
+ SimpleCookie will stop parsing when it encounters a cookie value with an unmatched
+ quote (e.g., 'cookie="value'), causing subsequent cookies to be silently dropped.
+ This implementation handles unmatched quotes more gracefully to prevent cookie loss.
+ See https://github.com/aio-libs/aiohttp/issues/7993
+ """
+ parsed_cookies: List[Tuple[str, Morsel[str]]] = []
+
+ for header in headers:
+ if not header:
+ continue
+
+ # Parse cookie string using SimpleCookie's algorithm
+ i = 0
+ n = len(header)
+ current_morsel: Optional[Morsel[str]] = None
+ morsel_seen = False
+
+ while 0 <= i < n:
+ # Start looking for a cookie
+ match = _COOKIE_PATTERN.match(header, i)
+ if not match:
+ # No more cookies
+ break
+
+ key, value = match.group("key"), match.group("val")
+ i = match.end(0)
+ lower_key = key.lower()
+
+ if key[0] == "$":
+ if not morsel_seen:
+ # We ignore attributes which pertain to the cookie
+ # mechanism as a whole, such as "$Version".
+ continue
+ # Process as attribute
+ if current_morsel is not None:
+ attr_lower_key = lower_key[1:]
+ if attr_lower_key in _COOKIE_KNOWN_ATTRS:
+ current_morsel[attr_lower_key] = value or ""
+ elif lower_key in _COOKIE_KNOWN_ATTRS:
+ if not morsel_seen:
+ # Invalid cookie string - attribute before cookie
+ break
+ if lower_key in _COOKIE_BOOL_ATTRS:
+ # Boolean attribute with any value should be True
+ if current_morsel is not None and current_morsel.isReservedKey(key):
+ current_morsel[lower_key] = True
+ elif value is None:
+ # Invalid cookie string - non-boolean attribute without value
+ break
+ elif current_morsel is not None:
+ # Regular attribute with value
+ current_morsel[lower_key] = _unquote(value)
+ elif value is not None:
+ # This is a cookie name=value pair
+ # Validate the name
+ if key in _COOKIE_KNOWN_ATTRS or not _COOKIE_NAME_RE.match(key):
+ internal_logger.warning(
+ "Can not load cookies: Illegal cookie name %r", key
+ )
+ current_morsel = None
+ else:
+ # Create new morsel
+ current_morsel = Morsel()
+ # Preserve the original value as coded_value (with quotes if present)
+ # We use __setstate__ instead of the public set() API because it allows us to
+ # bypass validation and set already validated state. This is more stable than
+ # setting protected attributes directly and unlikely to change since it would
+ # break pickling.
+ current_morsel.__setstate__( # type: ignore[attr-defined]
+ {"key": key, "value": _unquote(value), "coded_value": value}
+ )
+ parsed_cookies.append((key, current_morsel))
+ morsel_seen = True
+ else:
+ # Invalid cookie string - no value for non-attribute
+ break
+
+ return parsed_cookies
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_cparser.pxd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_cparser.pxd"
new file mode 100644
index 0000000..1b3be6d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_cparser.pxd"
@@ -0,0 +1,158 @@
+from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
+
+
+cdef extern from "llhttp.h":
+
+ struct llhttp__internal_s:
+ int32_t _index
+ void* _span_pos0
+ void* _span_cb0
+ int32_t error
+ const char* reason
+ const char* error_pos
+ void* data
+ void* _current
+ uint64_t content_length
+ uint8_t type
+ uint8_t method
+ uint8_t http_major
+ uint8_t http_minor
+ uint8_t header_state
+ uint8_t lenient_flags
+ uint8_t upgrade
+ uint8_t finish
+ uint16_t flags
+ uint16_t status_code
+ void* settings
+
+ ctypedef llhttp__internal_s llhttp__internal_t
+ ctypedef llhttp__internal_t llhttp_t
+
+ ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
+ ctypedef int (*llhttp_cb)(llhttp_t*) except -1
+
+ struct llhttp_settings_s:
+ llhttp_cb on_message_begin
+ llhttp_data_cb on_url
+ llhttp_data_cb on_status
+ llhttp_data_cb on_header_field
+ llhttp_data_cb on_header_value
+ llhttp_cb on_headers_complete
+ llhttp_data_cb on_body
+ llhttp_cb on_message_complete
+ llhttp_cb on_chunk_header
+ llhttp_cb on_chunk_complete
+
+ llhttp_cb on_url_complete
+ llhttp_cb on_status_complete
+ llhttp_cb on_header_field_complete
+ llhttp_cb on_header_value_complete
+
+ ctypedef llhttp_settings_s llhttp_settings_t
+
+ enum llhttp_errno:
+ HPE_OK,
+ HPE_INTERNAL,
+ HPE_STRICT,
+ HPE_LF_EXPECTED,
+ HPE_UNEXPECTED_CONTENT_LENGTH,
+ HPE_CLOSED_CONNECTION,
+ HPE_INVALID_METHOD,
+ HPE_INVALID_URL,
+ HPE_INVALID_CONSTANT,
+ HPE_INVALID_VERSION,
+ HPE_INVALID_HEADER_TOKEN,
+ HPE_INVALID_CONTENT_LENGTH,
+ HPE_INVALID_CHUNK_SIZE,
+ HPE_INVALID_STATUS,
+ HPE_INVALID_EOF_STATE,
+ HPE_INVALID_TRANSFER_ENCODING,
+ HPE_CB_MESSAGE_BEGIN,
+ HPE_CB_HEADERS_COMPLETE,
+ HPE_CB_MESSAGE_COMPLETE,
+ HPE_CB_CHUNK_HEADER,
+ HPE_CB_CHUNK_COMPLETE,
+ HPE_PAUSED,
+ HPE_PAUSED_UPGRADE,
+ HPE_USER
+
+ ctypedef llhttp_errno llhttp_errno_t
+
+ enum llhttp_flags:
+ F_CHUNKED,
+ F_CONTENT_LENGTH
+
+ enum llhttp_type:
+ HTTP_REQUEST,
+ HTTP_RESPONSE,
+ HTTP_BOTH
+
+ enum llhttp_method:
+ HTTP_DELETE,
+ HTTP_GET,
+ HTTP_HEAD,
+ HTTP_POST,
+ HTTP_PUT,
+ HTTP_CONNECT,
+ HTTP_OPTIONS,
+ HTTP_TRACE,
+ HTTP_COPY,
+ HTTP_LOCK,
+ HTTP_MKCOL,
+ HTTP_MOVE,
+ HTTP_PROPFIND,
+ HTTP_PROPPATCH,
+ HTTP_SEARCH,
+ HTTP_UNLOCK,
+ HTTP_BIND,
+ HTTP_REBIND,
+ HTTP_UNBIND,
+ HTTP_ACL,
+ HTTP_REPORT,
+ HTTP_MKACTIVITY,
+ HTTP_CHECKOUT,
+ HTTP_MERGE,
+ HTTP_MSEARCH,
+ HTTP_NOTIFY,
+ HTTP_SUBSCRIBE,
+ HTTP_UNSUBSCRIBE,
+ HTTP_PATCH,
+ HTTP_PURGE,
+ HTTP_MKCALENDAR,
+ HTTP_LINK,
+ HTTP_UNLINK,
+ HTTP_SOURCE,
+ HTTP_PRI,
+ HTTP_DESCRIBE,
+ HTTP_ANNOUNCE,
+ HTTP_SETUP,
+ HTTP_PLAY,
+ HTTP_PAUSE,
+ HTTP_TEARDOWN,
+ HTTP_GET_PARAMETER,
+ HTTP_SET_PARAMETER,
+ HTTP_REDIRECT,
+ HTTP_RECORD,
+ HTTP_FLUSH
+
+ ctypedef llhttp_method llhttp_method_t;
+
+ void llhttp_settings_init(llhttp_settings_t* settings)
+ void llhttp_init(llhttp_t* parser, llhttp_type type,
+ const llhttp_settings_t* settings)
+
+ llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
+
+ int llhttp_should_keep_alive(const llhttp_t* parser)
+
+ void llhttp_resume_after_upgrade(llhttp_t* parser)
+
+ llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
+ const char* llhttp_get_error_reason(const llhttp_t* parser)
+ const char* llhttp_get_error_pos(const llhttp_t* parser)
+
+ const char* llhttp_method_name(llhttp_method_t method)
+
+ void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_find_header.pxd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_find_header.pxd"
new file mode 100644
index 0000000..37a6c37
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_find_header.pxd"
@@ -0,0 +1,2 @@
+cdef extern from "_find_header.h":
+ int find_header(char *, int)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_headers.pxi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_headers.pxi"
new file mode 100644
index 0000000..3744721
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_headers.pxi"
@@ -0,0 +1,83 @@
+# The file is autogenerated from aiohttp/hdrs.py
+# Run ./tools/gen.py to update it after the origin changing.
+
+from . import hdrs
+cdef tuple headers = (
+ hdrs.ACCEPT,
+ hdrs.ACCEPT_CHARSET,
+ hdrs.ACCEPT_ENCODING,
+ hdrs.ACCEPT_LANGUAGE,
+ hdrs.ACCEPT_RANGES,
+ hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
+ hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
+ hdrs.ACCESS_CONTROL_ALLOW_METHODS,
+ hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
+ hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
+ hdrs.ACCESS_CONTROL_MAX_AGE,
+ hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
+ hdrs.ACCESS_CONTROL_REQUEST_METHOD,
+ hdrs.AGE,
+ hdrs.ALLOW,
+ hdrs.AUTHORIZATION,
+ hdrs.CACHE_CONTROL,
+ hdrs.CONNECTION,
+ hdrs.CONTENT_DISPOSITION,
+ hdrs.CONTENT_ENCODING,
+ hdrs.CONTENT_LANGUAGE,
+ hdrs.CONTENT_LENGTH,
+ hdrs.CONTENT_LOCATION,
+ hdrs.CONTENT_MD5,
+ hdrs.CONTENT_RANGE,
+ hdrs.CONTENT_TRANSFER_ENCODING,
+ hdrs.CONTENT_TYPE,
+ hdrs.COOKIE,
+ hdrs.DATE,
+ hdrs.DESTINATION,
+ hdrs.DIGEST,
+ hdrs.ETAG,
+ hdrs.EXPECT,
+ hdrs.EXPIRES,
+ hdrs.FORWARDED,
+ hdrs.FROM,
+ hdrs.HOST,
+ hdrs.IF_MATCH,
+ hdrs.IF_MODIFIED_SINCE,
+ hdrs.IF_NONE_MATCH,
+ hdrs.IF_RANGE,
+ hdrs.IF_UNMODIFIED_SINCE,
+ hdrs.KEEP_ALIVE,
+ hdrs.LAST_EVENT_ID,
+ hdrs.LAST_MODIFIED,
+ hdrs.LINK,
+ hdrs.LOCATION,
+ hdrs.MAX_FORWARDS,
+ hdrs.ORIGIN,
+ hdrs.PRAGMA,
+ hdrs.PROXY_AUTHENTICATE,
+ hdrs.PROXY_AUTHORIZATION,
+ hdrs.RANGE,
+ hdrs.REFERER,
+ hdrs.RETRY_AFTER,
+ hdrs.SEC_WEBSOCKET_ACCEPT,
+ hdrs.SEC_WEBSOCKET_EXTENSIONS,
+ hdrs.SEC_WEBSOCKET_KEY,
+ hdrs.SEC_WEBSOCKET_KEY1,
+ hdrs.SEC_WEBSOCKET_PROTOCOL,
+ hdrs.SEC_WEBSOCKET_VERSION,
+ hdrs.SERVER,
+ hdrs.SET_COOKIE,
+ hdrs.TE,
+ hdrs.TRAILER,
+ hdrs.TRANSFER_ENCODING,
+ hdrs.URI,
+ hdrs.UPGRADE,
+ hdrs.USER_AGENT,
+ hdrs.VARY,
+ hdrs.VIA,
+ hdrs.WWW_AUTHENTICATE,
+ hdrs.WANT_DIGEST,
+ hdrs.WARNING,
+ hdrs.X_FORWARDED_FOR,
+ hdrs.X_FORWARDED_HOST,
+ hdrs.X_FORWARDED_PROTO,
+)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..c151985
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.pyx" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.pyx"
new file mode 100644
index 0000000..4a7101e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_parser.pyx"
@@ -0,0 +1,835 @@
+# Based on https://github.com/MagicStack/httptools
+#
+
+from cpython cimport (
+ Py_buffer,
+ PyBUF_SIMPLE,
+ PyBuffer_Release,
+ PyBytes_AsString,
+ PyBytes_AsStringAndSize,
+ PyObject_GetBuffer,
+)
+from cpython.mem cimport PyMem_Free, PyMem_Malloc
+from libc.limits cimport ULLONG_MAX
+from libc.string cimport memcpy
+
+from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
+from yarl import URL as _URL
+
+from aiohttp import hdrs
+from aiohttp.helpers import DEBUG, set_exception
+
+from .http_exceptions import (
+ BadHttpMessage,
+ BadHttpMethod,
+ BadStatusLine,
+ ContentLengthError,
+ InvalidHeader,
+ InvalidURLError,
+ LineTooLong,
+ PayloadEncodingError,
+ TransferEncodingError,
+)
+from .http_parser import DeflateBuffer as _DeflateBuffer
+from .http_writer import (
+ HttpVersion as _HttpVersion,
+ HttpVersion10 as _HttpVersion10,
+ HttpVersion11 as _HttpVersion11,
+)
+from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
+
+cimport cython
+
+from aiohttp cimport _cparser as cparser
+
+include "_headers.pxi"
+
+from aiohttp cimport _find_header
+
+ALLOWED_UPGRADES = frozenset({"websocket"})
+DEF DEFAULT_FREELIST_SIZE = 250
+
+cdef extern from "Python.h":
+ int PyByteArray_Resize(object, Py_ssize_t) except -1
+ Py_ssize_t PyByteArray_Size(object) except -1
+ char* PyByteArray_AsString(object)
+
+__all__ = ('HttpRequestParser', 'HttpResponseParser',
+ 'RawRequestMessage', 'RawResponseMessage')
+
+cdef object URL = _URL
+cdef object URL_build = URL.build
+cdef object CIMultiDict = _CIMultiDict
+cdef object CIMultiDictProxy = _CIMultiDictProxy
+cdef object HttpVersion = _HttpVersion
+cdef object HttpVersion10 = _HttpVersion10
+cdef object HttpVersion11 = _HttpVersion11
+cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
+cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
+cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
+cdef object StreamReader = _StreamReader
+cdef object DeflateBuffer = _DeflateBuffer
+cdef bytes EMPTY_BYTES = b""
+
+cdef inline object extend(object buf, const char* at, size_t length):
+ cdef Py_ssize_t s
+ cdef char* ptr
+ s = PyByteArray_Size(buf)
+ PyByteArray_Resize(buf, s + length)
+ ptr = PyByteArray_AsString(buf)
+ memcpy(ptr + s, at, length)
+
+
+DEF METHODS_COUNT = 46;
+
+cdef list _http_method = []
+
+for i in range(METHODS_COUNT):
+ _http_method.append(
+ cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
+
+
+cdef inline str http_method_str(int i):
+ if i < METHODS_COUNT:
+ return <str>_http_method[i]
+ else:
+ return "<unknown>"
+
+cdef inline object find_header(bytes raw_header):
+ cdef Py_ssize_t size
+ cdef char *buf
+ cdef int idx
+ PyBytes_AsStringAndSize(raw_header, &buf, &size)
+ idx = _find_header.find_header(buf, size)
+ if idx == -1:
+ return raw_header.decode('utf-8', 'surrogateescape')
+ return headers[idx]
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawRequestMessage:
+ cdef readonly str method
+ cdef readonly str path
+ cdef readonly object version # HttpVersion
+ cdef readonly object headers # CIMultiDict
+ cdef readonly object raw_headers # tuple
+ cdef readonly object should_close
+ cdef readonly object compression
+ cdef readonly object upgrade
+ cdef readonly object chunked
+ cdef readonly object url # yarl.URL
+
+ def __init__(self, method, path, version, headers, raw_headers,
+ should_close, compression, upgrade, chunked, url):
+ self.method = method
+ self.path = path
+ self.version = version
+ self.headers = headers
+ self.raw_headers = raw_headers
+ self.should_close = should_close
+ self.compression = compression
+ self.upgrade = upgrade
+ self.chunked = chunked
+ self.url = url
+
+ def __repr__(self):
+ info = []
+ info.append(("method", self.method))
+ info.append(("path", self.path))
+ info.append(("version", self.version))
+ info.append(("headers", self.headers))
+ info.append(("raw_headers", self.raw_headers))
+ info.append(("should_close", self.should_close))
+ info.append(("compression", self.compression))
+ info.append(("upgrade", self.upgrade))
+ info.append(("chunked", self.chunked))
+ info.append(("url", self.url))
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+ return '<RawRequestMessage(' + sinfo + ')>'
+
+ def _replace(self, **dct):
+ cdef RawRequestMessage ret
+ ret = _new_request_message(self.method,
+ self.path,
+ self.version,
+ self.headers,
+ self.raw_headers,
+ self.should_close,
+ self.compression,
+ self.upgrade,
+ self.chunked,
+ self.url)
+ if "method" in dct:
+ ret.method = dct["method"]
+ if "path" in dct:
+ ret.path = dct["path"]
+ if "version" in dct:
+ ret.version = dct["version"]
+ if "headers" in dct:
+ ret.headers = dct["headers"]
+ if "raw_headers" in dct:
+ ret.raw_headers = dct["raw_headers"]
+ if "should_close" in dct:
+ ret.should_close = dct["should_close"]
+ if "compression" in dct:
+ ret.compression = dct["compression"]
+ if "upgrade" in dct:
+ ret.upgrade = dct["upgrade"]
+ if "chunked" in dct:
+ ret.chunked = dct["chunked"]
+ if "url" in dct:
+ ret.url = dct["url"]
+ return ret
+
+cdef _new_request_message(str method,
+ str path,
+ object version,
+ object headers,
+ object raw_headers,
+ bint should_close,
+ object compression,
+ bint upgrade,
+ bint chunked,
+ object url):
+ cdef RawRequestMessage ret
+ ret = RawRequestMessage.__new__(RawRequestMessage)
+ ret.method = method
+ ret.path = path
+ ret.version = version
+ ret.headers = headers
+ ret.raw_headers = raw_headers
+ ret.should_close = should_close
+ ret.compression = compression
+ ret.upgrade = upgrade
+ ret.chunked = chunked
+ ret.url = url
+ return ret
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawResponseMessage:
+ cdef readonly object version # HttpVersion
+ cdef readonly int code
+ cdef readonly str reason
+ cdef readonly object headers # CIMultiDict
+ cdef readonly object raw_headers # tuple
+ cdef readonly object should_close
+ cdef readonly object compression
+ cdef readonly object upgrade
+ cdef readonly object chunked
+
+ def __init__(self, version, code, reason, headers, raw_headers,
+ should_close, compression, upgrade, chunked):
+ self.version = version
+ self.code = code
+ self.reason = reason
+ self.headers = headers
+ self.raw_headers = raw_headers
+ self.should_close = should_close
+ self.compression = compression
+ self.upgrade = upgrade
+ self.chunked = chunked
+
+ def __repr__(self):
+ info = []
+ info.append(("version", self.version))
+ info.append(("code", self.code))
+ info.append(("reason", self.reason))
+ info.append(("headers", self.headers))
+ info.append(("raw_headers", self.raw_headers))
+ info.append(("should_close", self.should_close))
+ info.append(("compression", self.compression))
+ info.append(("upgrade", self.upgrade))
+ info.append(("chunked", self.chunked))
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+ return '<RawResponseMessage(' + sinfo + ')>'
+
+
+cdef _new_response_message(object version,
+ int code,
+ str reason,
+ object headers,
+ object raw_headers,
+ bint should_close,
+ object compression,
+ bint upgrade,
+ bint chunked):
+ cdef RawResponseMessage ret
+ ret = RawResponseMessage.__new__(RawResponseMessage)
+ ret.version = version
+ ret.code = code
+ ret.reason = reason
+ ret.headers = headers
+ ret.raw_headers = raw_headers
+ ret.should_close = should_close
+ ret.compression = compression
+ ret.upgrade = upgrade
+ ret.chunked = chunked
+ return ret
+
+
+@cython.internal
+cdef class HttpParser:
+
+ cdef:
+ cparser.llhttp_t* _cparser
+ cparser.llhttp_settings_t* _csettings
+
+ bytes _raw_name
+ object _name
+ bytes _raw_value
+ bint _has_value
+
+ object _protocol
+ object _loop
+ object _timer
+
+ size_t _max_line_size
+ size_t _max_field_size
+ size_t _max_headers
+ bint _response_with_body
+ bint _read_until_eof
+
+ bint _started
+ object _url
+ bytearray _buf
+ str _path
+ str _reason
+ list _headers
+ list _raw_headers
+ bint _upgraded
+ list _messages
+ object _payload
+ bint _payload_error
+ object _payload_exception
+ object _last_error
+ bint _auto_decompress
+ int _limit
+
+ str _content_encoding
+
+ Py_buffer py_buf
+
+ def __cinit__(self):
+ self._cparser = <cparser.llhttp_t*> \
+ PyMem_Malloc(sizeof(cparser.llhttp_t))
+ if self._cparser is NULL:
+ raise MemoryError()
+
+ self._csettings = <cparser.llhttp_settings_t*> \
+ PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
+ if self._csettings is NULL:
+ raise MemoryError()
+
+ def __dealloc__(self):
+ PyMem_Free(self._cparser)
+ PyMem_Free(self._csettings)
+
+ cdef _init(
+ self, cparser.llhttp_type mode,
+ object protocol, object loop, int limit,
+ object timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True,
+ ):
+ cparser.llhttp_settings_init(self._csettings)
+ cparser.llhttp_init(self._cparser, mode, self._csettings)
+ self._cparser.data = <void*>self
+ self._cparser.content_length = 0
+
+ self._protocol = protocol
+ self._loop = loop
+ self._timer = timer
+
+ self._buf = bytearray()
+ self._payload = None
+ self._payload_error = 0
+ self._payload_exception = payload_exception
+ self._messages = []
+
+ self._raw_name = EMPTY_BYTES
+ self._raw_value = EMPTY_BYTES
+ self._has_value = False
+
+ self._max_line_size = max_line_size
+ self._max_headers = max_headers
+ self._max_field_size = max_field_size
+ self._response_with_body = response_with_body
+ self._read_until_eof = read_until_eof
+ self._upgraded = False
+ self._auto_decompress = auto_decompress
+ self._content_encoding = None
+
+ self._csettings.on_url = cb_on_url
+ self._csettings.on_status = cb_on_status
+ self._csettings.on_header_field = cb_on_header_field
+ self._csettings.on_header_value = cb_on_header_value
+ self._csettings.on_headers_complete = cb_on_headers_complete
+ self._csettings.on_body = cb_on_body
+ self._csettings.on_message_begin = cb_on_message_begin
+ self._csettings.on_message_complete = cb_on_message_complete
+ self._csettings.on_chunk_header = cb_on_chunk_header
+ self._csettings.on_chunk_complete = cb_on_chunk_complete
+
+ self._last_error = None
+ self._limit = limit
+
+ cdef _process_header(self):
+ cdef str value
+ if self._raw_name is not EMPTY_BYTES:
+ name = find_header(self._raw_name)
+ value = self._raw_value.decode('utf-8', 'surrogateescape')
+
+ self._headers.append((name, value))
+
+ if name is CONTENT_ENCODING:
+ self._content_encoding = value
+
+ self._has_value = False
+ self._raw_headers.append((self._raw_name, self._raw_value))
+ self._raw_name = EMPTY_BYTES
+ self._raw_value = EMPTY_BYTES
+
+ cdef _on_header_field(self, char* at, size_t length):
+ if self._has_value:
+ self._process_header()
+
+ if self._raw_name is EMPTY_BYTES:
+ self._raw_name = at[:length]
+ else:
+ self._raw_name += at[:length]
+
+ cdef _on_header_value(self, char* at, size_t length):
+ if self._raw_value is EMPTY_BYTES:
+ self._raw_value = at[:length]
+ else:
+ self._raw_value += at[:length]
+ self._has_value = True
+
+ cdef _on_headers_complete(self):
+ self._process_header()
+
+ should_close = not cparser.llhttp_should_keep_alive(self._cparser)
+ upgrade = self._cparser.upgrade
+ chunked = self._cparser.flags & cparser.F_CHUNKED
+
+ raw_headers = tuple(self._raw_headers)
+ headers = CIMultiDictProxy(CIMultiDict(self._headers))
+
+ if self._cparser.type == cparser.HTTP_REQUEST:
+ h_upg = headers.get("upgrade", "")
+ allowed = upgrade and h_upg.isascii() and h_upg.lower() in ALLOWED_UPGRADES
+ if allowed or self._cparser.method == cparser.HTTP_CONNECT:
+ self._upgraded = True
+ else:
+ if upgrade and self._cparser.status_code == 101:
+ self._upgraded = True
+
+ # do not support old websocket spec
+ if SEC_WEBSOCKET_KEY1 in headers:
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
+
+ encoding = None
+ enc = self._content_encoding
+ if enc is not None:
+ self._content_encoding = None
+ if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}:
+ encoding = enc
+
+ if self._cparser.type == cparser.HTTP_REQUEST:
+ method = http_method_str(self._cparser.method)
+ msg = _new_request_message(
+ method, self._path,
+ self.http_version(), headers, raw_headers,
+ should_close, encoding, upgrade, chunked, self._url)
+ else:
+ msg = _new_response_message(
+ self.http_version(), self._cparser.status_code, self._reason,
+ headers, raw_headers, should_close, encoding,
+ upgrade, chunked)
+
+ if (
+ ULLONG_MAX > self._cparser.content_length > 0 or chunked or
+ self._cparser.method == cparser.HTTP_CONNECT or
+ (self._cparser.status_code >= 199 and
+ self._cparser.content_length == 0 and
+ self._read_until_eof)
+ ):
+ payload = StreamReader(
+ self._protocol, timer=self._timer, loop=self._loop,
+ limit=self._limit)
+ else:
+ payload = EMPTY_PAYLOAD
+
+ self._payload = payload
+ if encoding is not None and self._auto_decompress:
+ self._payload = DeflateBuffer(payload, encoding)
+
+ if not self._response_with_body:
+ payload = EMPTY_PAYLOAD
+
+ self._messages.append((msg, payload))
+
+ cdef _on_message_complete(self):
+ self._payload.feed_eof()
+ self._payload = None
+
+ cdef _on_chunk_header(self):
+ self._payload.begin_http_chunk_receiving()
+
+ cdef _on_chunk_complete(self):
+ self._payload.end_http_chunk_receiving()
+
+ cdef object _on_status_complete(self):
+ pass
+
+ cdef inline http_version(self):
+ cdef cparser.llhttp_t* parser = self._cparser
+
+ if parser.http_major == 1:
+ if parser.http_minor == 0:
+ return HttpVersion10
+ elif parser.http_minor == 1:
+ return HttpVersion11
+
+ return HttpVersion(parser.http_major, parser.http_minor)
+
+ ### Public API ###
+
+ def feed_eof(self):
+ cdef bytes desc
+
+ if self._payload is not None:
+ if self._cparser.flags & cparser.F_CHUNKED:
+ raise TransferEncodingError(
+ "Not enough data to satisfy transfer length header.")
+ elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
+ raise ContentLengthError(
+ "Not enough data to satisfy content length header.")
+ elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
+ desc = cparser.llhttp_get_error_reason(self._cparser)
+ raise PayloadEncodingError(desc.decode('latin-1'))
+ else:
+ self._payload.feed_eof()
+ elif self._started:
+ self._on_headers_complete()
+ if self._messages:
+ return self._messages[-1][0]
+
+ def feed_data(self, data):
+ cdef:
+ size_t data_len
+ size_t nb
+ cdef cparser.llhttp_errno_t errno
+
+ PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
+ data_len = <size_t>self.py_buf.len
+
+ errno = cparser.llhttp_execute(
+ self._cparser,
+ <char*>self.py_buf.buf,
+ data_len)
+
+ if errno is cparser.HPE_PAUSED_UPGRADE:
+ cparser.llhttp_resume_after_upgrade(self._cparser)
+
+ nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
+
+ PyBuffer_Release(&self.py_buf)
+
+ if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
+ if self._payload_error == 0:
+ if self._last_error is not None:
+ ex = self._last_error
+ self._last_error = None
+ else:
+ after = cparser.llhttp_get_error_pos(self._cparser)
+ before = data[:after - <char*>self.py_buf.buf]
+ after_b = after.split(b"\r\n", 1)[0]
+ before = before.rsplit(b"\r\n", 1)[-1]
+ data = before + after_b
+ pointer = " " * (len(repr(before))-1) + "^"
+ ex = parser_error_from_errno(self._cparser, data, pointer)
+ self._payload = None
+ raise ex
+
+ if self._messages:
+ messages = self._messages
+ self._messages = []
+ else:
+ messages = ()
+
+ if self._upgraded:
+ return messages, True, data[nb:]
+ else:
+ return messages, False, b""
+
+ def set_upgraded(self, val):
+ self._upgraded = val
+
+
+cdef class HttpRequestParser(HttpParser):
+
+ def __init__(
+ self, protocol, loop, int limit, timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True,
+ ):
+ self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
+ max_line_size, max_headers, max_field_size,
+ payload_exception, response_with_body, read_until_eof,
+ auto_decompress)
+
+ cdef object _on_status_complete(self):
+ cdef int idx1, idx2
+ if not self._buf:
+ return
+ self._path = self._buf.decode('utf-8', 'surrogateescape')
+ try:
+ idx3 = len(self._path)
+ if self._cparser.method == cparser.HTTP_CONNECT:
+ # authority-form,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
+ self._url = URL.build(authority=self._path, encoded=True)
+ elif idx3 > 1 and self._path[0] == '/':
+ # origin-form,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
+ idx1 = self._path.find("?")
+ if idx1 == -1:
+ query = ""
+ idx2 = self._path.find("#")
+ if idx2 == -1:
+ path = self._path
+ fragment = ""
+ else:
+ path = self._path[0: idx2]
+ fragment = self._path[idx2+1:]
+
+ else:
+ path = self._path[0:idx1]
+ idx1 += 1
+ idx2 = self._path.find("#", idx1+1)
+ if idx2 == -1:
+ query = self._path[idx1:]
+ fragment = ""
+ else:
+ query = self._path[idx1: idx2]
+ fragment = self._path[idx2+1:]
+
+ self._url = URL.build(
+ path=path,
+ query_string=query,
+ fragment=fragment,
+ encoded=True,
+ )
+ else:
+ # absolute-form for proxy maybe,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
+ self._url = URL(self._path, encoded=True)
+ finally:
+ PyByteArray_Resize(self._buf, 0)
+
+
+cdef class HttpResponseParser(HttpParser):
+
+ def __init__(
+ self, protocol, loop, int limit, timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True
+ ):
+ self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
+ max_line_size, max_headers, max_field_size,
+ payload_exception, response_with_body, read_until_eof,
+ auto_decompress)
+ # Use strict parsing on dev mode, so users are warned about broken servers.
+ if not DEBUG:
+ cparser.llhttp_set_lenient_headers(self._cparser, 1)
+ cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
+ cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
+
+ cdef object _on_status_complete(self):
+ if self._buf:
+ self._reason = self._buf.decode('utf-8', 'surrogateescape')
+ PyByteArray_Resize(self._buf, 0)
+ else:
+ self._reason = self._reason or ''
+
+cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+
+ pyparser._started = True
+ pyparser._headers = []
+ pyparser._raw_headers = []
+ PyByteArray_Resize(pyparser._buf, 0)
+ pyparser._path = None
+ pyparser._reason = None
+ return 0
+
+
+cdef int cb_on_url(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ if length > pyparser._max_line_size:
+ raise LineTooLong(
+ 'Status line is too long', pyparser._max_line_size, length)
+ extend(pyparser._buf, at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_status(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef str reason
+ try:
+ if length > pyparser._max_line_size:
+ raise LineTooLong(
+ 'Status line is too long', pyparser._max_line_size, length)
+ extend(pyparser._buf, at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_header_field(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef Py_ssize_t size
+ try:
+ pyparser._on_status_complete()
+ size = len(pyparser._raw_name) + length
+ if size > pyparser._max_field_size:
+ raise LineTooLong(
+ 'Header name is too long', pyparser._max_field_size, size)
+ pyparser._on_header_field(at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_header_value(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef Py_ssize_t size
+ try:
+ size = len(pyparser._raw_value) + length
+ if size > pyparser._max_field_size:
+ raise LineTooLong(
+ 'Header value is too long', pyparser._max_field_size, size)
+ pyparser._on_header_value(at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_status_complete()
+ pyparser._on_headers_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT:
+ return 2
+ else:
+ return 0
+
+
+cdef int cb_on_body(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef bytes body = at[:length]
+ try:
+ pyparser._payload.feed_data(body, length)
+ except BaseException as underlying_exc:
+ reraised_exc = underlying_exc
+ if pyparser._payload_exception is not None:
+ reraised_exc = pyparser._payload_exception(str(underlying_exc))
+
+ set_exception(pyparser._payload, reraised_exc, underlying_exc)
+
+ pyparser._payload_error = 1
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._started = False
+ pyparser._on_message_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_chunk_header()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_chunk_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
+ cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
+ cdef bytes desc = cparser.llhttp_get_error_reason(parser)
+
+ err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)
+
+ if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
+ cparser.HPE_CB_HEADERS_COMPLETE,
+ cparser.HPE_CB_MESSAGE_COMPLETE,
+ cparser.HPE_CB_CHUNK_HEADER,
+ cparser.HPE_CB_CHUNK_COMPLETE,
+ cparser.HPE_INVALID_CONSTANT,
+ cparser.HPE_INVALID_HEADER_TOKEN,
+ cparser.HPE_INVALID_CONTENT_LENGTH,
+ cparser.HPE_INVALID_CHUNK_SIZE,
+ cparser.HPE_INVALID_EOF_STATE,
+ cparser.HPE_INVALID_TRANSFER_ENCODING}:
+ return BadHttpMessage(err_msg)
+ elif errno == cparser.HPE_INVALID_METHOD:
+ return BadHttpMethod(error=err_msg)
+ elif errno in {cparser.HPE_INVALID_STATUS,
+ cparser.HPE_INVALID_VERSION}:
+ return BadStatusLine(error=err_msg)
+ elif errno == cparser.HPE_INVALID_URL:
+ return InvalidURLError(err_msg)
+
+ return BadHttpMessage(err_msg)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..2ec7690
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.pyx" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.pyx"
new file mode 100644
index 0000000..7989c18
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_http_writer.pyx"
@@ -0,0 +1,162 @@
+from cpython.bytes cimport PyBytes_FromStringAndSize
+from cpython.exc cimport PyErr_NoMemory
+from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
+from cpython.object cimport PyObject_Str
+from libc.stdint cimport uint8_t, uint64_t
+from libc.string cimport memcpy
+
+from multidict import istr
+
+DEF BUF_SIZE = 16 * 1024 # 16KiB
+
+cdef object _istr = istr
+
+
+# ----------------- writer ---------------------------
+
+cdef struct Writer:
+ char *buf
+ Py_ssize_t size
+ Py_ssize_t pos
+ bint heap_allocated
+
+cdef inline void _init_writer(Writer* writer, char *buf):
+ writer.buf = buf
+ writer.size = BUF_SIZE
+ writer.pos = 0
+ writer.heap_allocated = 0
+
+
+cdef inline void _release_writer(Writer* writer):
+ if writer.heap_allocated:
+ PyMem_Free(writer.buf)
+
+
+cdef inline int _write_byte(Writer* writer, uint8_t ch):
+ cdef char * buf
+ cdef Py_ssize_t size
+
+ if writer.pos == writer.size:
+ # reallocate
+ size = writer.size + BUF_SIZE
+ if not writer.heap_allocated:
+ buf = <char*>PyMem_Malloc(size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ memcpy(buf, writer.buf, writer.size)
+ else:
+ buf = <char*>PyMem_Realloc(writer.buf, size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ writer.buf = buf
+ writer.size = size
+ writer.heap_allocated = 1
+ writer.buf[writer.pos] = <char>ch
+ writer.pos += 1
+ return 0
+
+
+cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
+ cdef uint64_t utf = <uint64_t> symbol
+
+ if utf < 0x80:
+ return _write_byte(writer, <uint8_t>utf)
+ elif utf < 0x800:
+ if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+ elif 0xD800 <= utf <= 0xDFFF:
+ # surogate pair, ignored
+ return 0
+ elif utf < 0x10000:
+ if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
+ return -1
+ if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+ elif utf > 0x10FFFF:
+ # symbol is too large
+ return 0
+ else:
+ if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
+ return -1
+ if _write_byte(writer,
+ <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
+ return -1
+ if _write_byte(writer,
+ <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+
+
+cdef inline int _write_str(Writer* writer, str s):
+ cdef Py_UCS4 ch
+ for ch in s:
+ if _write_utf8(writer, ch) < 0:
+ return -1
+
+
+cdef inline int _write_str_raise_on_nlcr(Writer* writer, object s):
+ cdef Py_UCS4 ch
+ cdef str out_str
+ if type(s) is str:
+ out_str = <str>s
+ elif type(s) is _istr:
+ out_str = PyObject_Str(s)
+ elif not isinstance(s, str):
+ raise TypeError("Cannot serialize non-str key {!r}".format(s))
+ else:
+ out_str = str(s)
+
+ for ch in out_str:
+ if ch == 0x0D or ch == 0x0A:
+ raise ValueError(
+ "Newline or carriage return detected in headers. "
+ "Potential header injection attack."
+ )
+ if _write_utf8(writer, ch) < 0:
+ return -1
+
+
+# --------------- _serialize_headers ----------------------
+
+def _serialize_headers(str status_line, headers):
+ cdef Writer writer
+ cdef object key
+ cdef object val
+ cdef char buf[BUF_SIZE]
+
+ _init_writer(&writer, buf)
+
+ try:
+ if _write_str(&writer, status_line) < 0:
+ raise
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ for key, val in headers.items():
+ if _write_str_raise_on_nlcr(&writer, key) < 0:
+ raise
+ if _write_byte(&writer, b':') < 0:
+ raise
+ if _write_byte(&writer, b' ') < 0:
+ raise
+ if _write_str_raise_on_nlcr(&writer, val) < 0:
+ raise
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ return PyBytes_FromStringAndSize(writer.buf, writer.pos)
+ finally:
+ _release_writer(&writer)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash"
new file mode 100644
index 0000000..59ec123
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash"
@@ -0,0 +1 @@
+e354dd499be171b6125bf56bc3b6c5e2bff2a28af69e3b5d699ddb9af2bafa3c *D:/a/aiohttp/aiohttp/aiohttp/_websocket/mask.pxd
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash"
new file mode 100644
index 0000000..025189e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash"
@@ -0,0 +1 @@
+468edd38ebf8dc7000a8d333df1c82035d69a5c9febc0448be3c9c4ad4c4630c *D:/a/aiohttp/aiohttp/aiohttp/_websocket/mask.pyx
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash"
new file mode 100644
index 0000000..0f260dc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash"
@@ -0,0 +1 @@
+1cd3a5e20456b4d04d11835b2bd3c639f14443052a2467b105b0ca07fdb4b25d *D:/a/aiohttp/aiohttp/aiohttp/_websocket/reader_c.pxd
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/__init__.py"
new file mode 100644
index 0000000..836257c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/__init__.py"
@@ -0,0 +1 @@
+"""WebSocket protocol versions 13 and 8."""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/helpers.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/helpers.py"
new file mode 100644
index 0000000..0bb58df
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/helpers.py"
@@ -0,0 +1,147 @@
+"""Helpers for WebSocket protocol versions 13 and 8."""
+
+import functools
+import re
+from struct import Struct
+from typing import TYPE_CHECKING, Final, List, Optional, Pattern, Tuple
+
+from ..helpers import NO_EXTENSIONS
+from .models import WSHandshakeError
+
+UNPACK_LEN3 = Struct("!Q").unpack_from
+UNPACK_CLOSE_CODE = Struct("!H").unpack
+PACK_LEN1 = Struct("!BB").pack
+PACK_LEN2 = Struct("!BBH").pack
+PACK_LEN3 = Struct("!BBQ").pack
+PACK_CLOSE_CODE = Struct("!H").pack
+PACK_RANDBITS = Struct("!L").pack
+MSG_SIZE: Final[int] = 2**14
+MASK_LEN: Final[int] = 4
+
+WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+
+
+# Used by _websocket_mask_python
+@functools.lru_cache
+def _xor_table() -> List[bytes]:
+ return [bytes(a ^ b for a in range(256)) for b in range(256)]
+
+
+def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
+ """Websocket masking function.
+
+ `mask` is a `bytes` object of length 4; `data` is a `bytearray`
+ object of any length. The contents of `data` are masked with `mask`,
+ as specified in section 5.3 of RFC 6455.
+
+ Note that this function mutates the `data` argument.
+
+ This pure-python implementation may be replaced by an optimized
+ version when available.
+
+ """
+ assert isinstance(data, bytearray), data
+ assert len(mask) == 4, mask
+
+ if data:
+ _XOR_TABLE = _xor_table()
+ a, b, c, d = (_XOR_TABLE[n] for n in mask)
+ data[::4] = data[::4].translate(a)
+ data[1::4] = data[1::4].translate(b)
+ data[2::4] = data[2::4].translate(c)
+ data[3::4] = data[3::4].translate(d)
+
+
+if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover
+ websocket_mask = _websocket_mask_python
+else:
+ try:
+ from .mask import _websocket_mask_cython # type: ignore[import-not-found]
+
+ websocket_mask = _websocket_mask_cython
+ except ImportError: # pragma: no cover
+ websocket_mask = _websocket_mask_python
+
+
+_WS_EXT_RE: Final[Pattern[str]] = re.compile(
+ r"^(?:;\s*(?:"
+ r"(server_no_context_takeover)|"
+ r"(client_no_context_takeover)|"
+ r"(server_max_window_bits(?:=(\d+))?)|"
+ r"(client_max_window_bits(?:=(\d+))?)))*$"
+)
+
+_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
+
+
+def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
+ if not extstr:
+ return 0, False
+
+ compress = 0
+ notakeover = False
+ for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
+ defext = ext.group(1)
+ # Return compress = 15 when get `permessage-deflate`
+ if not defext:
+ compress = 15
+ break
+ match = _WS_EXT_RE.match(defext)
+ if match:
+ compress = 15
+ if isserver:
+ # Server never fail to detect compress handshake.
+ # Server does not need to send max wbit to client
+ if match.group(4):
+ compress = int(match.group(4))
+ # Group3 must match if group4 matches
+ # Compress wbit 8 does not support in zlib
+ # If compress level not support,
+ # CONTINUE to next extension
+ if compress > 15 or compress < 9:
+ compress = 0
+ continue
+ if match.group(1):
+ notakeover = True
+ # Ignore regex group 5 & 6 for client_max_window_bits
+ break
+ else:
+ if match.group(6):
+ compress = int(match.group(6))
+ # Group5 must match if group6 matches
+ # Compress wbit 8 does not support in zlib
+ # If compress level not support,
+ # FAIL the parse progress
+ if compress > 15 or compress < 9:
+ raise WSHandshakeError("Invalid window size")
+ if match.group(2):
+ notakeover = True
+ # Ignore regex group 5 & 6 for client_max_window_bits
+ break
+ # Return Fail if client side and not match
+ elif not isserver:
+ raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
+
+ return compress, notakeover
+
+
+def ws_ext_gen(
+ compress: int = 15, isserver: bool = False, server_notakeover: bool = False
+) -> str:
+ # client_notakeover=False not used for server
+ # compress wbit 8 does not support in zlib
+ if compress < 9 or compress > 15:
+ raise ValueError(
+ "Compress wbits must between 9 and 15, zlib does not support wbits=8"
+ )
+ enabledext = ["permessage-deflate"]
+ if not isserver:
+ enabledext.append("client_max_window_bits")
+
+ if compress < 15:
+ enabledext.append("server_max_window_bits=" + str(compress))
+ if server_notakeover:
+ enabledext.append("server_no_context_takeover")
+ # if client_notakeover:
+ # enabledext.append('client_no_context_takeover')
+ return "; ".join(enabledext)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..060d974
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pxd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pxd"
new file mode 100644
index 0000000..90983de
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pxd"
@@ -0,0 +1,3 @@
+"""Cython declarations for websocket masking."""
+
+cpdef void _websocket_mask_cython(bytes mask, bytearray data)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pyx" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pyx"
new file mode 100644
index 0000000..2d956c8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/mask.pyx"
@@ -0,0 +1,48 @@
+from cpython cimport PyBytes_AsString
+
+
+#from cpython cimport PyByteArray_AsString # cython still not exports that
+cdef extern from "Python.h":
+ char* PyByteArray_AsString(bytearray ba) except NULL
+
+from libc.stdint cimport uint32_t, uint64_t, uintmax_t
+
+
+cpdef void _websocket_mask_cython(bytes mask, bytearray data):
+ """Note, this function mutates its `data` argument
+ """
+ cdef:
+ Py_ssize_t data_len, i
+ # bit operations on signed integers are implementation-specific
+ unsigned char * in_buf
+ const unsigned char * mask_buf
+ uint32_t uint32_msk
+ uint64_t uint64_msk
+
+ assert len(mask) == 4
+
+ data_len = len(data)
+ in_buf = <unsigned char*>PyByteArray_AsString(data)
+ mask_buf = <const unsigned char*>PyBytes_AsString(mask)
+ uint32_msk = (<uint32_t*>mask_buf)[0]
+
+ # TODO: align in_data ptr to achieve even faster speeds
+ # does it need in python ?! malloc() always aligns to sizeof(long) bytes
+
+ if sizeof(size_t) >= 8:
+ uint64_msk = uint32_msk
+ uint64_msk = (uint64_msk << 32) | uint32_msk
+
+ while data_len >= 8:
+ (<uint64_t*>in_buf)[0] ^= uint64_msk
+ in_buf += 8
+ data_len -= 8
+
+
+ while data_len >= 4:
+ (<uint32_t*>in_buf)[0] ^= uint32_msk
+ in_buf += 4
+ data_len -= 4
+
+ for i in range(0, data_len):
+ in_buf[i] ^= mask_buf[i]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/models.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/models.py"
new file mode 100644
index 0000000..7e89b96
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/models.py"
@@ -0,0 +1,84 @@
+"""Models for WebSocket protocol versions 13 and 8."""
+
+import json
+from enum import IntEnum
+from typing import Any, Callable, Final, NamedTuple, Optional, cast
+
+WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
+
+
+class WSCloseCode(IntEnum):
+ OK = 1000
+ GOING_AWAY = 1001
+ PROTOCOL_ERROR = 1002
+ UNSUPPORTED_DATA = 1003
+ ABNORMAL_CLOSURE = 1006
+ INVALID_TEXT = 1007
+ POLICY_VIOLATION = 1008
+ MESSAGE_TOO_BIG = 1009
+ MANDATORY_EXTENSION = 1010
+ INTERNAL_ERROR = 1011
+ SERVICE_RESTART = 1012
+ TRY_AGAIN_LATER = 1013
+ BAD_GATEWAY = 1014
+
+
+class WSMsgType(IntEnum):
+ # websocket spec types
+ CONTINUATION = 0x0
+ TEXT = 0x1
+ BINARY = 0x2
+ PING = 0x9
+ PONG = 0xA
+ CLOSE = 0x8
+
+ # aiohttp specific types
+ CLOSING = 0x100
+ CLOSED = 0x101
+ ERROR = 0x102
+
+ text = TEXT
+ binary = BINARY
+ ping = PING
+ pong = PONG
+ close = CLOSE
+ closing = CLOSING
+ closed = CLOSED
+ error = ERROR
+
+
+class WSMessage(NamedTuple):
+ type: WSMsgType
+ # To type correctly, this would need some kind of tagged union for each type.
+ data: Any
+ extra: Optional[str]
+
+ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
+ """Return parsed JSON data.
+
+ .. versionadded:: 0.22
+ """
+ return loads(self.data)
+
+
+# Constructing the tuple directly to avoid the overhead of
+# the lambda and arg processing since NamedTuples are constructed
+# with a run time built lambda
+# https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441
+WS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None))
+WS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None))
+
+
+class WebSocketError(Exception):
+ """WebSocket protocol parser error."""
+
+ def __init__(self, code: int, message: str) -> None:
+ self.code = code
+ super().__init__(code, message)
+
+ def __str__(self) -> str:
+ return cast(str, self.args[1])
+
+
+class WSHandshakeError(Exception):
+ """WebSocket protocol handshake error."""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader.py"
new file mode 100644
index 0000000..23f3226
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader.py"
@@ -0,0 +1,31 @@
+"""Reader for WebSocket protocol versions 13 and 8."""
+
+from typing import TYPE_CHECKING
+
+from ..helpers import NO_EXTENSIONS
+
+if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover
+ from .reader_py import (
+ WebSocketDataQueue as WebSocketDataQueuePython,
+ WebSocketReader as WebSocketReaderPython,
+ )
+
+ WebSocketReader = WebSocketReaderPython
+ WebSocketDataQueue = WebSocketDataQueuePython
+else:
+ try:
+ from .reader_c import ( # type: ignore[import-not-found]
+ WebSocketDataQueue as WebSocketDataQueueCython,
+ WebSocketReader as WebSocketReaderCython,
+ )
+
+ WebSocketReader = WebSocketReaderCython
+ WebSocketDataQueue = WebSocketDataQueueCython
+ except ImportError: # pragma: no cover
+ from .reader_py import (
+ WebSocketDataQueue as WebSocketDataQueuePython,
+ WebSocketReader as WebSocketReaderPython,
+ )
+
+ WebSocketReader = WebSocketReaderPython
+ WebSocketDataQueue = WebSocketDataQueuePython
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..85fa3fa
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.pxd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.pxd"
new file mode 100644
index 0000000..a7620d8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.pxd"
@@ -0,0 +1,110 @@
+import cython
+
+from .mask cimport _websocket_mask_cython as websocket_mask
+
+
+cdef unsigned int READ_HEADER
+cdef unsigned int READ_PAYLOAD_LENGTH
+cdef unsigned int READ_PAYLOAD_MASK
+cdef unsigned int READ_PAYLOAD
+
+cdef int OP_CODE_NOT_SET
+cdef int OP_CODE_CONTINUATION
+cdef int OP_CODE_TEXT
+cdef int OP_CODE_BINARY
+cdef int OP_CODE_CLOSE
+cdef int OP_CODE_PING
+cdef int OP_CODE_PONG
+
+cdef int COMPRESSED_NOT_SET
+cdef int COMPRESSED_FALSE
+cdef int COMPRESSED_TRUE
+
+cdef object UNPACK_LEN3
+cdef object UNPACK_CLOSE_CODE
+cdef object TUPLE_NEW
+
+cdef object WSMsgType
+cdef object WSMessage
+
+cdef object WS_MSG_TYPE_TEXT
+cdef object WS_MSG_TYPE_BINARY
+
+cdef set ALLOWED_CLOSE_CODES
+cdef set MESSAGE_TYPES_WITH_CONTENT
+
+cdef tuple EMPTY_FRAME
+cdef tuple EMPTY_FRAME_ERROR
+
+cdef class WebSocketDataQueue:
+
+ cdef unsigned int _size
+ cdef public object _protocol
+ cdef unsigned int _limit
+ cdef object _loop
+ cdef bint _eof
+ cdef object _waiter
+ cdef object _exception
+ cdef public object _buffer
+ cdef object _get_buffer
+ cdef object _put_buffer
+
+ cdef void _release_waiter(self)
+
+ cpdef void feed_data(self, object data, unsigned int size)
+
+ @cython.locals(size="unsigned int")
+ cdef _read_from_buffer(self)
+
+cdef class WebSocketReader:
+
+ cdef WebSocketDataQueue queue
+ cdef unsigned int _max_msg_size
+
+ cdef Exception _exc
+ cdef bytearray _partial
+ cdef unsigned int _state
+
+ cdef int _opcode
+ cdef bint _frame_fin
+ cdef int _frame_opcode
+ cdef list _payload_fragments
+ cdef Py_ssize_t _frame_payload_len
+
+ cdef bytes _tail
+ cdef bint _has_mask
+ cdef bytes _frame_mask
+ cdef Py_ssize_t _payload_bytes_to_read
+ cdef unsigned int _payload_len_flag
+ cdef int _compressed
+ cdef object _decompressobj
+ cdef bint _compress
+
+ cpdef tuple feed_data(self, object data)
+
+ @cython.locals(
+ is_continuation=bint,
+ fin=bint,
+ has_partial=bint,
+ payload_merged=bytes,
+ )
+ cpdef void _handle_frame(self, bint fin, int opcode, object payload, int compressed) except *
+
+ @cython.locals(
+ start_pos=Py_ssize_t,
+ data_len=Py_ssize_t,
+ length=Py_ssize_t,
+ chunk_size=Py_ssize_t,
+ chunk_len=Py_ssize_t,
+ data_len=Py_ssize_t,
+ data_cstr="const unsigned char *",
+ first_byte="unsigned char",
+ second_byte="unsigned char",
+ f_start_pos=Py_ssize_t,
+ f_end_pos=Py_ssize_t,
+ has_mask=bint,
+ fin=bint,
+ had_fragments=Py_ssize_t,
+ payload_bytearray=bytearray,
+ )
+ cpdef void _feed_data(self, bytes data) except *
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.py"
new file mode 100644
index 0000000..5166d7e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_c.py"
@@ -0,0 +1,478 @@
+"""Reader for WebSocket protocol versions 13 and 8."""
+
+import asyncio
+import builtins
+from collections import deque
+from typing import Deque, Final, Optional, Set, Tuple, Union
+
+from ..base_protocol import BaseProtocol
+from ..compression_utils import ZLibDecompressor
+from ..helpers import _EXC_SENTINEL, set_exception
+from ..streams import EofStream
+from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask
+from .models import (
+ WS_DEFLATE_TRAILING,
+ WebSocketError,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType,
+)
+
+ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
+
+# States for the reader, used to parse the WebSocket frame
+# integer values are used so they can be cythonized
+READ_HEADER = 1
+READ_PAYLOAD_LENGTH = 2
+READ_PAYLOAD_MASK = 3
+READ_PAYLOAD = 4
+
+WS_MSG_TYPE_BINARY = WSMsgType.BINARY
+WS_MSG_TYPE_TEXT = WSMsgType.TEXT
+
+# WSMsgType values unpacked so they can by cythonized to ints
+OP_CODE_NOT_SET = -1
+OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value
+OP_CODE_TEXT = WSMsgType.TEXT.value
+OP_CODE_BINARY = WSMsgType.BINARY.value
+OP_CODE_CLOSE = WSMsgType.CLOSE.value
+OP_CODE_PING = WSMsgType.PING.value
+OP_CODE_PONG = WSMsgType.PONG.value
+
+EMPTY_FRAME_ERROR = (True, b"")
+EMPTY_FRAME = (False, b"")
+
+COMPRESSED_NOT_SET = -1
+COMPRESSED_FALSE = 0
+COMPRESSED_TRUE = 1
+
+TUPLE_NEW = tuple.__new__
+
+cython_int = int # Typed to int in Python, but cython with use a signed int in the pxd
+
+
+class WebSocketDataQueue:
+ """WebSocketDataQueue resumes and pauses an underlying stream.
+
+ It is a destination for WebSocket data.
+ """
+
+ def __init__(
+ self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
+ ) -> None:
+ self._size = 0
+ self._protocol = protocol
+ self._limit = limit * 2
+ self._loop = loop
+ self._eof = False
+ self._waiter: Optional[asyncio.Future[None]] = None
+ self._exception: Union[BaseException, None] = None
+ self._buffer: Deque[Tuple[WSMessage, int]] = deque()
+ self._get_buffer = self._buffer.popleft
+ self._put_buffer = self._buffer.append
+
+ def is_eof(self) -> bool:
+ return self._eof
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: builtins.BaseException = _EXC_SENTINEL,
+ ) -> None:
+ self._eof = True
+ self._exception = exc
+ if (waiter := self._waiter) is not None:
+ self._waiter = None
+ set_exception(waiter, exc, exc_cause)
+
+ def _release_waiter(self) -> None:
+ if (waiter := self._waiter) is None:
+ return
+ self._waiter = None
+ if not waiter.done():
+ waiter.set_result(None)
+
+ def feed_eof(self) -> None:
+ self._eof = True
+ self._release_waiter()
+ self._exception = None # Break cyclic references
+
+ def feed_data(self, data: "WSMessage", size: "cython_int") -> None:
+ self._size += size
+ self._put_buffer((data, size))
+ self._release_waiter()
+ if self._size > self._limit and not self._protocol._reading_paused:
+ self._protocol.pause_reading()
+
+ async def read(self) -> WSMessage:
+ if not self._buffer and not self._eof:
+ assert not self._waiter
+ self._waiter = self._loop.create_future()
+ try:
+ await self._waiter
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._waiter = None
+ raise
+ return self._read_from_buffer()
+
+ def _read_from_buffer(self) -> WSMessage:
+ if self._buffer:
+ data, size = self._get_buffer()
+ self._size -= size
+ if self._size < self._limit and self._protocol._reading_paused:
+ self._protocol.resume_reading()
+ return data
+ if self._exception is not None:
+ raise self._exception
+ raise EofStream
+
+
+class WebSocketReader:
+ def __init__(
+ self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True
+ ) -> None:
+ self.queue = queue
+ self._max_msg_size = max_msg_size
+
+ self._exc: Optional[Exception] = None
+ self._partial = bytearray()
+ self._state = READ_HEADER
+
+ self._opcode: int = OP_CODE_NOT_SET
+ self._frame_fin = False
+ self._frame_opcode: int = OP_CODE_NOT_SET
+ self._payload_fragments: list[bytes] = []
+ self._frame_payload_len = 0
+
+ self._tail: bytes = b""
+ self._has_mask = False
+ self._frame_mask: Optional[bytes] = None
+ self._payload_bytes_to_read = 0
+ self._payload_len_flag = 0
+ self._compressed: int = COMPRESSED_NOT_SET
+ self._decompressobj: Optional[ZLibDecompressor] = None
+ self._compress = compress
+
+ def feed_eof(self) -> None:
+ self.queue.feed_eof()
+
+ # data can be bytearray on Windows because proactor event loop uses bytearray
+ # and asyncio types this to Union[bytes, bytearray, memoryview] so we need
+ # coerce data to bytes if it is not
+ def feed_data(
+ self, data: Union[bytes, bytearray, memoryview]
+ ) -> Tuple[bool, bytes]:
+ if type(data) is not bytes:
+ data = bytes(data)
+
+ if self._exc is not None:
+ return True, data
+
+ try:
+ self._feed_data(data)
+ except Exception as exc:
+ self._exc = exc
+ set_exception(self.queue, exc)
+ return EMPTY_FRAME_ERROR
+
+ return EMPTY_FRAME
+
+ def _handle_frame(
+ self,
+ fin: bool,
+ opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int
+ payload: Union[bytes, bytearray],
+ compressed: Union[int, cython_int], # Union intended: Cython pxd uses C int
+ ) -> None:
+ msg: WSMessage
+ if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}:
+ # Validate continuation frames before processing
+ if opcode == OP_CODE_CONTINUATION and self._opcode == OP_CODE_NOT_SET:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Continuation frame for non started message",
+ )
+
+ # load text/binary
+ if not fin:
+ # got partial frame payload
+ if opcode != OP_CODE_CONTINUATION:
+ self._opcode = opcode
+ self._partial += payload
+ if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ f"Message size {len(self._partial)} "
+ f"exceeds limit {self._max_msg_size}",
+ )
+ return
+
+ has_partial = bool(self._partial)
+ if opcode == OP_CODE_CONTINUATION:
+ opcode = self._opcode
+ self._opcode = OP_CODE_NOT_SET
+ # previous frame was non finished
+ # we should get continuation opcode
+ elif has_partial:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "The opcode in non-fin frame is expected "
+ f"to be zero, got {opcode!r}",
+ )
+
+ assembled_payload: Union[bytes, bytearray]
+ if has_partial:
+ assembled_payload = self._partial + payload
+ self._partial.clear()
+ else:
+ assembled_payload = payload
+
+ if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ f"Message size {len(assembled_payload)} "
+ f"exceeds limit {self._max_msg_size}",
+ )
+
+ # Decompress process must to be done after all packets
+ # received.
+ if compressed:
+ if not self._decompressobj:
+ self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
+ # XXX: It's possible that the zlib backend (isal is known to
+ # do this, maybe others too?) will return max_length bytes,
+ # but internally buffer more data such that the payload is
+ # >max_length, so we return one extra byte and if we're able
+ # to do that, then the message is too big.
+ payload_merged = self._decompressobj.decompress_sync(
+ assembled_payload + WS_DEFLATE_TRAILING,
+ (
+ self._max_msg_size + 1
+ if self._max_msg_size
+ else self._max_msg_size
+ ),
+ )
+ if self._max_msg_size and len(payload_merged) > self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ f"Decompressed message exceeds size limit {self._max_msg_size}",
+ )
+ elif type(assembled_payload) is bytes:
+ payload_merged = assembled_payload
+ else:
+ payload_merged = bytes(assembled_payload)
+
+ if opcode == OP_CODE_TEXT:
+ try:
+ text = payload_merged.decode("utf-8")
+ except UnicodeDecodeError as exc:
+ raise WebSocketError(
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+ ) from exc
+
+ # XXX: The Text and Binary messages here can be a performance
+ # bottleneck, so we use tuple.__new__ to improve performance.
+ # This is not type safe, but many tests should fail in
+ # test_client_ws_functional.py if this is wrong.
+ self.queue.feed_data(
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),
+ len(payload_merged),
+ )
+ else:
+ self.queue.feed_data(
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),
+ len(payload_merged),
+ )
+ elif opcode == OP_CODE_CLOSE:
+ if len(payload) >= 2:
+ close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
+ if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ f"Invalid close code: {close_code}",
+ )
+ try:
+ close_message = payload[2:].decode("utf-8")
+ except UnicodeDecodeError as exc:
+ raise WebSocketError(
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+ ) from exc
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message))
+ elif payload:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ f"Invalid close frame: {fin} {opcode} {payload!r}",
+ )
+ else:
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))
+
+ self.queue.feed_data(msg, 0)
+ elif opcode == OP_CODE_PING:
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))
+ self.queue.feed_data(msg, len(payload))
+ elif opcode == OP_CODE_PONG:
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))
+ self.queue.feed_data(msg, len(payload))
+ else:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
+ )
+
+ def _feed_data(self, data: bytes) -> None:
+ """Return the next frame from the socket."""
+ if self._tail:
+ data, self._tail = self._tail + data, b""
+
+ start_pos: int = 0
+ data_len = len(data)
+ data_cstr = data
+
+ while True:
+ # read header
+ if self._state == READ_HEADER:
+ if data_len - start_pos < 2:
+ break
+ first_byte = data_cstr[start_pos]
+ second_byte = data_cstr[start_pos + 1]
+ start_pos += 2
+
+ fin = (first_byte >> 7) & 1
+ rsv1 = (first_byte >> 6) & 1
+ rsv2 = (first_byte >> 5) & 1
+ rsv3 = (first_byte >> 4) & 1
+ opcode = first_byte & 0xF
+
+ # frame-fin = %x0 ; more frames of this message follow
+ # / %x1 ; final frame of this message
+ # frame-rsv1 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ # frame-rsv2 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ # frame-rsv3 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ #
+ # Remove rsv1 from this test for deflate development
+ if rsv2 or rsv3 or (rsv1 and not self._compress):
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received frame with non-zero reserved bits",
+ )
+
+ if opcode > 0x7 and fin == 0:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received fragmented control frame",
+ )
+
+ has_mask = (second_byte >> 7) & 1
+ length = second_byte & 0x7F
+
+ # Control frames MUST have a payload
+ # length of 125 bytes or less
+ if opcode > 0x7 and length > 125:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Control frame payload cannot be larger than 125 bytes",
+ )
+
+ # Set compress status if last package is FIN
+ # OR set compress status if this is first fragment
+ # Raise error if not first fragment with rsv1 = 0x1
+ if self._frame_fin or self._compressed == COMPRESSED_NOT_SET:
+ self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE
+ elif rsv1:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received frame with non-zero reserved bits",
+ )
+
+ self._frame_fin = bool(fin)
+ self._frame_opcode = opcode
+ self._has_mask = bool(has_mask)
+ self._payload_len_flag = length
+ self._state = READ_PAYLOAD_LENGTH
+
+ # read payload length
+ if self._state == READ_PAYLOAD_LENGTH:
+ len_flag = self._payload_len_flag
+ if len_flag == 126:
+ if data_len - start_pos < 2:
+ break
+ first_byte = data_cstr[start_pos]
+ second_byte = data_cstr[start_pos + 1]
+ start_pos += 2
+ self._payload_bytes_to_read = first_byte << 8 | second_byte
+ elif len_flag > 126:
+ if data_len - start_pos < 8:
+ break
+ self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0]
+ start_pos += 8
+ else:
+ self._payload_bytes_to_read = len_flag
+
+ self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD
+
+ # read payload mask
+ if self._state == READ_PAYLOAD_MASK:
+ if data_len - start_pos < 4:
+ break
+ self._frame_mask = data_cstr[start_pos : start_pos + 4]
+ start_pos += 4
+ self._state = READ_PAYLOAD
+
+ if self._state == READ_PAYLOAD:
+ chunk_len = data_len - start_pos
+ if self._payload_bytes_to_read >= chunk_len:
+ f_end_pos = data_len
+ self._payload_bytes_to_read -= chunk_len
+ else:
+ f_end_pos = start_pos + self._payload_bytes_to_read
+ self._payload_bytes_to_read = 0
+
+ had_fragments = self._frame_payload_len
+ self._frame_payload_len += f_end_pos - start_pos
+ f_start_pos = start_pos
+ start_pos = f_end_pos
+
+ if self._payload_bytes_to_read != 0:
+ # If we don't have a complete frame, we need to save the
+ # data for the next call to feed_data.
+ self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])
+ break
+
+ payload: Union[bytes, bytearray]
+ if had_fragments:
+ # We have to join the payload fragments get the payload
+ self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])
+ if self._has_mask:
+ assert self._frame_mask is not None
+ payload_bytearray = bytearray(b"".join(self._payload_fragments))
+ websocket_mask(self._frame_mask, payload_bytearray)
+ payload = payload_bytearray
+ else:
+ payload = b"".join(self._payload_fragments)
+ self._payload_fragments.clear()
+ elif self._has_mask:
+ assert self._frame_mask is not None
+ payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment]
+ if type(payload_bytearray) is not bytearray: # pragma: no branch
+ # Cython will do the conversion for us
+ # but we need to do it for Python and we
+ # will always get here in Python
+ payload_bytearray = bytearray(payload_bytearray)
+ websocket_mask(self._frame_mask, payload_bytearray)
+ payload = payload_bytearray
+ else:
+ payload = data_cstr[f_start_pos:f_end_pos]
+
+ self._handle_frame(
+ self._frame_fin, self._frame_opcode, payload, self._compressed
+ )
+ self._frame_payload_len = 0
+ self._state = READ_HEADER
+
+ # XXX: Cython needs slices to be bounded, so we can't omit the slice end here.
+ self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_py.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_py.py"
new file mode 100644
index 0000000..5166d7e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/reader_py.py"
@@ -0,0 +1,478 @@
+"""Reader for WebSocket protocol versions 13 and 8."""
+
+import asyncio
+import builtins
+from collections import deque
+from typing import Deque, Final, Optional, Set, Tuple, Union
+
+from ..base_protocol import BaseProtocol
+from ..compression_utils import ZLibDecompressor
+from ..helpers import _EXC_SENTINEL, set_exception
+from ..streams import EofStream
+from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask
+from .models import (
+ WS_DEFLATE_TRAILING,
+ WebSocketError,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType,
+)
+
+ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
+
+# States for the reader, used to parse the WebSocket frame
+# integer values are used so they can be cythonized
+READ_HEADER = 1
+READ_PAYLOAD_LENGTH = 2
+READ_PAYLOAD_MASK = 3
+READ_PAYLOAD = 4
+
+WS_MSG_TYPE_BINARY = WSMsgType.BINARY
+WS_MSG_TYPE_TEXT = WSMsgType.TEXT
+
+# WSMsgType values unpacked so they can by cythonized to ints
+OP_CODE_NOT_SET = -1
+OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value
+OP_CODE_TEXT = WSMsgType.TEXT.value
+OP_CODE_BINARY = WSMsgType.BINARY.value
+OP_CODE_CLOSE = WSMsgType.CLOSE.value
+OP_CODE_PING = WSMsgType.PING.value
+OP_CODE_PONG = WSMsgType.PONG.value
+
+EMPTY_FRAME_ERROR = (True, b"")
+EMPTY_FRAME = (False, b"")
+
+COMPRESSED_NOT_SET = -1
+COMPRESSED_FALSE = 0
+COMPRESSED_TRUE = 1
+
+TUPLE_NEW = tuple.__new__
+
+cython_int = int # Typed to int in Python, but cython with use a signed int in the pxd
+
+
+class WebSocketDataQueue:
+ """WebSocketDataQueue resumes and pauses an underlying stream.
+
+ It is a destination for WebSocket data.
+ """
+
+ def __init__(
+ self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
+ ) -> None:
+ self._size = 0
+ self._protocol = protocol
+ self._limit = limit * 2
+ self._loop = loop
+ self._eof = False
+ self._waiter: Optional[asyncio.Future[None]] = None
+ self._exception: Union[BaseException, None] = None
+ self._buffer: Deque[Tuple[WSMessage, int]] = deque()
+ self._get_buffer = self._buffer.popleft
+ self._put_buffer = self._buffer.append
+
+ def is_eof(self) -> bool:
+ return self._eof
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: builtins.BaseException = _EXC_SENTINEL,
+ ) -> None:
+ self._eof = True
+ self._exception = exc
+ if (waiter := self._waiter) is not None:
+ self._waiter = None
+ set_exception(waiter, exc, exc_cause)
+
+ def _release_waiter(self) -> None:
+ if (waiter := self._waiter) is None:
+ return
+ self._waiter = None
+ if not waiter.done():
+ waiter.set_result(None)
+
+ def feed_eof(self) -> None:
+ self._eof = True
+ self._release_waiter()
+ self._exception = None # Break cyclic references
+
+ def feed_data(self, data: "WSMessage", size: "cython_int") -> None:
+ self._size += size
+ self._put_buffer((data, size))
+ self._release_waiter()
+ if self._size > self._limit and not self._protocol._reading_paused:
+ self._protocol.pause_reading()
+
+ async def read(self) -> WSMessage:
+ if not self._buffer and not self._eof:
+ assert not self._waiter
+ self._waiter = self._loop.create_future()
+ try:
+ await self._waiter
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._waiter = None
+ raise
+ return self._read_from_buffer()
+
+ def _read_from_buffer(self) -> WSMessage:
+ if self._buffer:
+ data, size = self._get_buffer()
+ self._size -= size
+ if self._size < self._limit and self._protocol._reading_paused:
+ self._protocol.resume_reading()
+ return data
+ if self._exception is not None:
+ raise self._exception
+ raise EofStream
+
+
+class WebSocketReader:
+ def __init__(
+ self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True
+ ) -> None:
+ self.queue = queue
+ self._max_msg_size = max_msg_size
+
+ self._exc: Optional[Exception] = None
+ self._partial = bytearray()
+ self._state = READ_HEADER
+
+ self._opcode: int = OP_CODE_NOT_SET
+ self._frame_fin = False
+ self._frame_opcode: int = OP_CODE_NOT_SET
+ self._payload_fragments: list[bytes] = []
+ self._frame_payload_len = 0
+
+ self._tail: bytes = b""
+ self._has_mask = False
+ self._frame_mask: Optional[bytes] = None
+ self._payload_bytes_to_read = 0
+ self._payload_len_flag = 0
+ self._compressed: int = COMPRESSED_NOT_SET
+ self._decompressobj: Optional[ZLibDecompressor] = None
+ self._compress = compress
+
+ def feed_eof(self) -> None:
+ self.queue.feed_eof()
+
+ # data can be bytearray on Windows because proactor event loop uses bytearray
+ # and asyncio types this to Union[bytes, bytearray, memoryview] so we need
+ # coerce data to bytes if it is not
+ def feed_data(
+ self, data: Union[bytes, bytearray, memoryview]
+ ) -> Tuple[bool, bytes]:
+ if type(data) is not bytes:
+ data = bytes(data)
+
+ if self._exc is not None:
+ return True, data
+
+ try:
+ self._feed_data(data)
+ except Exception as exc:
+ self._exc = exc
+ set_exception(self.queue, exc)
+ return EMPTY_FRAME_ERROR
+
+ return EMPTY_FRAME
+
+ def _handle_frame(
+ self,
+ fin: bool,
+ opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int
+ payload: Union[bytes, bytearray],
+ compressed: Union[int, cython_int], # Union intended: Cython pxd uses C int
+ ) -> None:
+ msg: WSMessage
+ if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}:
+ # Validate continuation frames before processing
+ if opcode == OP_CODE_CONTINUATION and self._opcode == OP_CODE_NOT_SET:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Continuation frame for non started message",
+ )
+
+ # load text/binary
+ if not fin:
+ # got partial frame payload
+ if opcode != OP_CODE_CONTINUATION:
+ self._opcode = opcode
+ self._partial += payload
+ if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ f"Message size {len(self._partial)} "
+ f"exceeds limit {self._max_msg_size}",
+ )
+ return
+
+ has_partial = bool(self._partial)
+ if opcode == OP_CODE_CONTINUATION:
+ opcode = self._opcode
+ self._opcode = OP_CODE_NOT_SET
+ # previous frame was non finished
+ # we should get continuation opcode
+ elif has_partial:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "The opcode in non-fin frame is expected "
+ f"to be zero, got {opcode!r}",
+ )
+
+ assembled_payload: Union[bytes, bytearray]
+ if has_partial:
+ assembled_payload = self._partial + payload
+ self._partial.clear()
+ else:
+ assembled_payload = payload
+
+ if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ f"Message size {len(assembled_payload)} "
+ f"exceeds limit {self._max_msg_size}",
+ )
+
+ # Decompress process must to be done after all packets
+ # received.
+ if compressed:
+ if not self._decompressobj:
+ self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
+ # XXX: It's possible that the zlib backend (isal is known to
+ # do this, maybe others too?) will return max_length bytes,
+ # but internally buffer more data such that the payload is
+ # >max_length, so we return one extra byte and if we're able
+ # to do that, then the message is too big.
+ payload_merged = self._decompressobj.decompress_sync(
+ assembled_payload + WS_DEFLATE_TRAILING,
+ (
+ self._max_msg_size + 1
+ if self._max_msg_size
+ else self._max_msg_size
+ ),
+ )
+ if self._max_msg_size and len(payload_merged) > self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ f"Decompressed message exceeds size limit {self._max_msg_size}",
+ )
+ elif type(assembled_payload) is bytes:
+ payload_merged = assembled_payload
+ else:
+ payload_merged = bytes(assembled_payload)
+
+ if opcode == OP_CODE_TEXT:
+ try:
+ text = payload_merged.decode("utf-8")
+ except UnicodeDecodeError as exc:
+ raise WebSocketError(
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+ ) from exc
+
+ # XXX: The Text and Binary messages here can be a performance
+ # bottleneck, so we use tuple.__new__ to improve performance.
+ # This is not type safe, but many tests should fail in
+ # test_client_ws_functional.py if this is wrong.
+ self.queue.feed_data(
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),
+ len(payload_merged),
+ )
+ else:
+ self.queue.feed_data(
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),
+ len(payload_merged),
+ )
+ elif opcode == OP_CODE_CLOSE:
+ if len(payload) >= 2:
+ close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
+ if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ f"Invalid close code: {close_code}",
+ )
+ try:
+ close_message = payload[2:].decode("utf-8")
+ except UnicodeDecodeError as exc:
+ raise WebSocketError(
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+ ) from exc
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, close_code, close_message))
+ elif payload:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ f"Invalid close frame: {fin} {opcode} {payload!r}",
+ )
+ else:
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))
+
+ self.queue.feed_data(msg, 0)
+ elif opcode == OP_CODE_PING:
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))
+ self.queue.feed_data(msg, len(payload))
+ elif opcode == OP_CODE_PONG:
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))
+ self.queue.feed_data(msg, len(payload))
+ else:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
+ )
+
+ def _feed_data(self, data: bytes) -> None:
+ """Return the next frame from the socket."""
+ if self._tail:
+ data, self._tail = self._tail + data, b""
+
+ start_pos: int = 0
+ data_len = len(data)
+ data_cstr = data
+
+ while True:
+ # read header
+ if self._state == READ_HEADER:
+ if data_len - start_pos < 2:
+ break
+ first_byte = data_cstr[start_pos]
+ second_byte = data_cstr[start_pos + 1]
+ start_pos += 2
+
+ fin = (first_byte >> 7) & 1
+ rsv1 = (first_byte >> 6) & 1
+ rsv2 = (first_byte >> 5) & 1
+ rsv3 = (first_byte >> 4) & 1
+ opcode = first_byte & 0xF
+
+ # frame-fin = %x0 ; more frames of this message follow
+ # / %x1 ; final frame of this message
+ # frame-rsv1 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ # frame-rsv2 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ # frame-rsv3 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ #
+ # Remove rsv1 from this test for deflate development
+ if rsv2 or rsv3 or (rsv1 and not self._compress):
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received frame with non-zero reserved bits",
+ )
+
+ if opcode > 0x7 and fin == 0:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received fragmented control frame",
+ )
+
+ has_mask = (second_byte >> 7) & 1
+ length = second_byte & 0x7F
+
+ # Control frames MUST have a payload
+ # length of 125 bytes or less
+ if opcode > 0x7 and length > 125:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Control frame payload cannot be larger than 125 bytes",
+ )
+
+ # Set compress status if last package is FIN
+ # OR set compress status if this is first fragment
+ # Raise error if not first fragment with rsv1 = 0x1
+ if self._frame_fin or self._compressed == COMPRESSED_NOT_SET:
+ self._compressed = COMPRESSED_TRUE if rsv1 else COMPRESSED_FALSE
+ elif rsv1:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received frame with non-zero reserved bits",
+ )
+
+ self._frame_fin = bool(fin)
+ self._frame_opcode = opcode
+ self._has_mask = bool(has_mask)
+ self._payload_len_flag = length
+ self._state = READ_PAYLOAD_LENGTH
+
+ # read payload length
+ if self._state == READ_PAYLOAD_LENGTH:
+ len_flag = self._payload_len_flag
+ if len_flag == 126:
+ if data_len - start_pos < 2:
+ break
+ first_byte = data_cstr[start_pos]
+ second_byte = data_cstr[start_pos + 1]
+ start_pos += 2
+ self._payload_bytes_to_read = first_byte << 8 | second_byte
+ elif len_flag > 126:
+ if data_len - start_pos < 8:
+ break
+ self._payload_bytes_to_read = UNPACK_LEN3(data, start_pos)[0]
+ start_pos += 8
+ else:
+ self._payload_bytes_to_read = len_flag
+
+ self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD
+
+ # read payload mask
+ if self._state == READ_PAYLOAD_MASK:
+ if data_len - start_pos < 4:
+ break
+ self._frame_mask = data_cstr[start_pos : start_pos + 4]
+ start_pos += 4
+ self._state = READ_PAYLOAD
+
+ if self._state == READ_PAYLOAD:
+ chunk_len = data_len - start_pos
+ if self._payload_bytes_to_read >= chunk_len:
+ f_end_pos = data_len
+ self._payload_bytes_to_read -= chunk_len
+ else:
+ f_end_pos = start_pos + self._payload_bytes_to_read
+ self._payload_bytes_to_read = 0
+
+ had_fragments = self._frame_payload_len
+ self._frame_payload_len += f_end_pos - start_pos
+ f_start_pos = start_pos
+ start_pos = f_end_pos
+
+ if self._payload_bytes_to_read != 0:
+ # If we don't have a complete frame, we need to save the
+ # data for the next call to feed_data.
+ self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])
+ break
+
+ payload: Union[bytes, bytearray]
+ if had_fragments:
+ # We have to join the payload fragments get the payload
+ self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos])
+ if self._has_mask:
+ assert self._frame_mask is not None
+ payload_bytearray = bytearray(b"".join(self._payload_fragments))
+ websocket_mask(self._frame_mask, payload_bytearray)
+ payload = payload_bytearray
+ else:
+ payload = b"".join(self._payload_fragments)
+ self._payload_fragments.clear()
+ elif self._has_mask:
+ assert self._frame_mask is not None
+ payload_bytearray = data_cstr[f_start_pos:f_end_pos] # type: ignore[assignment]
+ if type(payload_bytearray) is not bytearray: # pragma: no branch
+ # Cython will do the conversion for us
+ # but we need to do it for Python and we
+ # will always get here in Python
+ payload_bytearray = bytearray(payload_bytearray)
+ websocket_mask(self._frame_mask, payload_bytearray)
+ payload = payload_bytearray
+ else:
+ payload = data_cstr[f_start_pos:f_end_pos]
+
+ self._handle_frame(
+ self._frame_fin, self._frame_opcode, payload, self._compressed
+ )
+ self._frame_payload_len = 0
+ self._state = READ_HEADER
+
+ # XXX: Cython needs slices to be bounded, so we can't omit the slice end here.
+ self._tail = data_cstr[start_pos:data_len] if start_pos < data_len else b""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/writer.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/writer.py"
new file mode 100644
index 0000000..9604202
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/_websocket/writer.py"
@@ -0,0 +1,262 @@
+"""WebSocket protocol versions 13 and 8."""
+
+import asyncio
+import random
+import sys
+from functools import partial
+from typing import Final, Optional, Set, Union
+
+from ..base_protocol import BaseProtocol
+from ..client_exceptions import ClientConnectionResetError
+from ..compression_utils import ZLibBackend, ZLibCompressor
+from .helpers import (
+ MASK_LEN,
+ MSG_SIZE,
+ PACK_CLOSE_CODE,
+ PACK_LEN1,
+ PACK_LEN2,
+ PACK_LEN3,
+ PACK_RANDBITS,
+ websocket_mask,
+)
+from .models import WS_DEFLATE_TRAILING, WSMsgType
+
+DEFAULT_LIMIT: Final[int] = 2**16
+
+# WebSocket opcode boundary: opcodes 0-7 are data frames, 8-15 are control frames
+# Control frames (ping, pong, close) are never compressed
+WS_CONTROL_FRAME_OPCODE: Final[int] = 8
+
+# For websockets, keeping latency low is extremely important as implementations
+# generally expect to be able to send and receive messages quickly. We use a
+# larger chunk size to reduce the number of executor calls and avoid task
+# creation overhead, since both are significant sources of latency when chunks
+# are small. A size of 16KiB was chosen as a balance between avoiding task
+# overhead and not blocking the event loop too long with synchronous compression.
+
+WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 16 * 1024
+
+
+class WebSocketWriter:
+ """WebSocket writer.
+
+ The writer is responsible for sending messages to the client. It is
+ created by the protocol when a connection is established. The writer
+ should avoid implementing any application logic and should only be
+ concerned with the low-level details of the WebSocket protocol.
+ """
+
+ def __init__(
+ self,
+ protocol: BaseProtocol,
+ transport: asyncio.Transport,
+ *,
+ use_mask: bool = False,
+ limit: int = DEFAULT_LIMIT,
+ random: random.Random = random.Random(),
+ compress: int = 0,
+ notakeover: bool = False,
+ ) -> None:
+ """Initialize a WebSocket writer."""
+ self.protocol = protocol
+ self.transport = transport
+ self.use_mask = use_mask
+ self.get_random_bits = partial(random.getrandbits, 32)
+ self.compress = compress
+ self.notakeover = notakeover
+ self._closing = False
+ self._limit = limit
+ self._output_size = 0
+ self._compressobj: Optional[ZLibCompressor] = None
+ self._send_lock = asyncio.Lock()
+ self._background_tasks: Set[asyncio.Task[None]] = set()
+
+ async def send_frame(
+ self, message: bytes, opcode: int, compress: Optional[int] = None
+ ) -> None:
+ """Send a frame over the websocket with message as its payload."""
+ if self._closing and not (opcode & WSMsgType.CLOSE):
+ raise ClientConnectionResetError("Cannot write to closing transport")
+
+ if not (compress or self.compress) or opcode >= WS_CONTROL_FRAME_OPCODE:
+ # Non-compressed frames don't need lock or shield
+ self._write_websocket_frame(message, opcode, 0)
+ elif len(message) <= WEBSOCKET_MAX_SYNC_CHUNK_SIZE:
+ # Small compressed payloads - compress synchronously in event loop
+ # We need the lock even though sync compression has no await points.
+ # This prevents small frames from interleaving with large frames that
+ # compress in the executor, avoiding compressor state corruption.
+ async with self._send_lock:
+ self._send_compressed_frame_sync(message, opcode, compress)
+ else:
+ # Large compressed frames need shield to prevent corruption
+ # For large compressed frames, the entire compress+send
+ # operation must be atomic. If cancelled after compression but
+ # before send, the compressor state would be advanced but data
+ # not sent, corrupting subsequent frames.
+ # Create a task to shield from cancellation
+ # The lock is acquired inside the shielded task so the entire
+ # operation (lock + compress + send) completes atomically.
+ # Use eager_start on Python 3.12+ to avoid scheduling overhead
+ loop = asyncio.get_running_loop()
+ coro = self._send_compressed_frame_async_locked(message, opcode, compress)
+ if sys.version_info >= (3, 12):
+ send_task = asyncio.Task(coro, loop=loop, eager_start=True)
+ else:
+ send_task = loop.create_task(coro)
+ # Keep a strong reference to prevent garbage collection
+ self._background_tasks.add(send_task)
+ send_task.add_done_callback(self._background_tasks.discard)
+ await asyncio.shield(send_task)
+
+ # It is safe to return control to the event loop when using compression
+ # after this point as we have already sent or buffered all the data.
+ # Once we have written output_size up to the limit, we call the
+ # drain helper which waits for the transport to be ready to accept
+ # more data. This is a flow control mechanism to prevent the buffer
+ # from growing too large. The drain helper will return right away
+ # if the writer is not paused.
+ if self._output_size > self._limit:
+ self._output_size = 0
+ if self.protocol._paused:
+ await self.protocol._drain_helper()
+
+ def _write_websocket_frame(self, message: bytes, opcode: int, rsv: int) -> None:
+ """
+ Write a websocket frame to the transport.
+
+ This method handles frame header construction, masking, and writing to transport.
+ It does not handle compression or flow control - those are the responsibility
+ of the caller.
+ """
+ msg_length = len(message)
+
+ use_mask = self.use_mask
+ mask_bit = 0x80 if use_mask else 0
+
+ # Depending on the message length, the header is assembled differently.
+ # The first byte is reserved for the opcode and the RSV bits.
+ first_byte = 0x80 | rsv | opcode
+ if msg_length < 126:
+ header = PACK_LEN1(first_byte, msg_length | mask_bit)
+ header_len = 2
+ elif msg_length < 65536:
+ header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length)
+ header_len = 4
+ else:
+ header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length)
+ header_len = 10
+
+ if self.transport.is_closing():
+ raise ClientConnectionResetError("Cannot write to closing transport")
+
+ # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3
+ # If we are using a mask, we need to generate it randomly
+ # and apply it to the message before sending it. A mask is
+ # a 32-bit value that is applied to the message using a
+ # bitwise XOR operation. It is used to prevent certain types
+ # of attacks on the websocket protocol. The mask is only used
+ # when aiohttp is acting as a client. Servers do not use a mask.
+ if use_mask:
+ mask = PACK_RANDBITS(self.get_random_bits())
+ message = bytearray(message)
+ websocket_mask(mask, message)
+ self.transport.write(header + mask + message)
+ self._output_size += MASK_LEN
+ elif msg_length > MSG_SIZE:
+ self.transport.write(header)
+ self.transport.write(message)
+ else:
+ self.transport.write(header + message)
+
+ self._output_size += header_len + msg_length
+
+ def _get_compressor(self, compress: Optional[int]) -> ZLibCompressor:
+ """Get or create a compressor object for the given compression level."""
+ if compress:
+ # Do not set self._compress if compressing is for this frame
+ return ZLibCompressor(
+ level=ZLibBackend.Z_BEST_SPEED,
+ wbits=-compress,
+ max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
+ )
+ if not self._compressobj:
+ self._compressobj = ZLibCompressor(
+ level=ZLibBackend.Z_BEST_SPEED,
+ wbits=-self.compress,
+ max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
+ )
+ return self._compressobj
+
+ def _send_compressed_frame_sync(
+ self, message: bytes, opcode: int, compress: Optional[int]
+ ) -> None:
+ """
+ Synchronous send for small compressed frames.
+
+ This is used for small compressed payloads that compress synchronously in the event loop.
+ Since there are no await points, this is inherently cancellation-safe.
+ """
+ # RSV are the reserved bits in the frame header. They are used to
+ # indicate that the frame is using an extension.
+ # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
+ compressobj = self._get_compressor(compress)
+ # (0x40) RSV1 is set for compressed frames
+ # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1
+ self._write_websocket_frame(
+ (
+ compressobj.compress_sync(message)
+ + compressobj.flush(
+ ZLibBackend.Z_FULL_FLUSH
+ if self.notakeover
+ else ZLibBackend.Z_SYNC_FLUSH
+ )
+ ).removesuffix(WS_DEFLATE_TRAILING),
+ opcode,
+ 0x40,
+ )
+
+ async def _send_compressed_frame_async_locked(
+ self, message: bytes, opcode: int, compress: Optional[int]
+ ) -> None:
+ """
+ Async send for large compressed frames with lock.
+
+ Acquires the lock and compresses large payloads asynchronously in
+ the executor. The lock is held for the entire operation to ensure
+ the compressor state is not corrupted by concurrent sends.
+
+ MUST be run shielded from cancellation. If cancelled after
+ compression but before sending, the compressor state would be
+ advanced but data not sent, corrupting subsequent frames.
+ """
+ async with self._send_lock:
+ # RSV are the reserved bits in the frame header. They are used to
+ # indicate that the frame is using an extension.
+ # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
+ compressobj = self._get_compressor(compress)
+ # (0x40) RSV1 is set for compressed frames
+ # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1
+ self._write_websocket_frame(
+ (
+ await compressobj.compress(message)
+ + compressobj.flush(
+ ZLibBackend.Z_FULL_FLUSH
+ if self.notakeover
+ else ZLibBackend.Z_SYNC_FLUSH
+ )
+ ).removesuffix(WS_DEFLATE_TRAILING),
+ opcode,
+ 0x40,
+ )
+
+ async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
+ """Close the websocket, sending the specified code and message."""
+ if isinstance(message, str):
+ message = message.encode("utf-8")
+ try:
+ await self.send_frame(
+ PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
+ )
+ finally:
+ self._closing = True
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/abc.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/abc.py"
new file mode 100644
index 0000000..faf0957
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/abc.py"
@@ -0,0 +1,268 @@
+import asyncio
+import logging
+import socket
+from abc import ABC, abstractmethod
+from collections.abc import Sized
+from http.cookies import BaseCookie, Morsel
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Dict,
+ Generator,
+ Iterable,
+ List,
+ Optional,
+ Sequence,
+ Tuple,
+ TypedDict,
+ Union,
+)
+
+from multidict import CIMultiDict
+from yarl import URL
+
+from ._cookie_helpers import parse_set_cookie_headers
+from .typedefs import LooseCookies
+
+if TYPE_CHECKING:
+ from .web_app import Application
+ from .web_exceptions import HTTPException
+ from .web_request import BaseRequest, Request
+ from .web_response import StreamResponse
+else:
+ BaseRequest = Request = Application = StreamResponse = None
+ HTTPException = None
+
+
+class AbstractRouter(ABC):
+ def __init__(self) -> None:
+ self._frozen = False
+
+ def post_init(self, app: Application) -> None:
+ """Post init stage.
+
+ Not an abstract method for sake of backward compatibility,
+ but if the router wants to be aware of the application
+ it can override this.
+ """
+
+ @property
+ def frozen(self) -> bool:
+ return self._frozen
+
+ def freeze(self) -> None:
+ """Freeze router."""
+ self._frozen = True
+
+ @abstractmethod
+ async def resolve(self, request: Request) -> "AbstractMatchInfo":
+ """Return MATCH_INFO for given request"""
+
+
+class AbstractMatchInfo(ABC):
+
+ __slots__ = ()
+
+ @property # pragma: no branch
+ @abstractmethod
+ def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
+ """Execute matched request handler"""
+
+ @property
+ @abstractmethod
+ def expect_handler(
+ self,
+ ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
+ """Expect handler for 100-continue processing"""
+
+ @property # pragma: no branch
+ @abstractmethod
+ def http_exception(self) -> Optional[HTTPException]:
+ """HTTPException instance raised on router's resolving, or None"""
+
+ @abstractmethod # pragma: no branch
+ def get_info(self) -> Dict[str, Any]:
+ """Return a dict with additional info useful for introspection"""
+
+ @property # pragma: no branch
+ @abstractmethod
+ def apps(self) -> Tuple[Application, ...]:
+ """Stack of nested applications.
+
+ Top level application is left-most element.
+
+ """
+
+ @abstractmethod
+ def add_app(self, app: Application) -> None:
+ """Add application to the nested apps stack."""
+
+ @abstractmethod
+ def freeze(self) -> None:
+ """Freeze the match info.
+
+ The method is called after route resolution.
+
+ After the call .add_app() is forbidden.
+
+ """
+
+
+class AbstractView(ABC):
+ """Abstract class based view."""
+
+ def __init__(self, request: Request) -> None:
+ self._request = request
+
+ @property
+ def request(self) -> Request:
+ """Request instance."""
+ return self._request
+
+ @abstractmethod
+ def __await__(self) -> Generator[None, None, StreamResponse]:
+ """Execute the view handler."""
+
+
+class ResolveResult(TypedDict):
+ """Resolve result.
+
+ This is the result returned from an AbstractResolver's
+ resolve method.
+
+ :param hostname: The hostname that was provided.
+ :param host: The IP address that was resolved.
+ :param port: The port that was resolved.
+ :param family: The address family that was resolved.
+ :param proto: The protocol that was resolved.
+ :param flags: The flags that were resolved.
+ """
+
+ hostname: str
+ host: str
+ port: int
+ family: int
+ proto: int
+ flags: int
+
+
+class AbstractResolver(ABC):
+ """Abstract DNS resolver."""
+
+ @abstractmethod
+ async def resolve(
+ self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
+ ) -> List[ResolveResult]:
+ """Return IP address for given hostname"""
+
+ @abstractmethod
+ async def close(self) -> None:
+ """Release resolver"""
+
+
+if TYPE_CHECKING:
+ IterableBase = Iterable[Morsel[str]]
+else:
+ IterableBase = Iterable
+
+
+ClearCookiePredicate = Callable[["Morsel[str]"], bool]
+
+
+class AbstractCookieJar(Sized, IterableBase):
+ """Abstract Cookie Jar."""
+
+ def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+ self._loop = loop or asyncio.get_running_loop()
+
+ @property
+ @abstractmethod
+ def quote_cookie(self) -> bool:
+ """Return True if cookies should be quoted."""
+
+ @abstractmethod
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+ """Clear all cookies if no predicate is passed."""
+
+ @abstractmethod
+ def clear_domain(self, domain: str) -> None:
+ """Clear all cookies for domain and all subdomains."""
+
+ @abstractmethod
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+ """Update cookies."""
+
+ def update_cookies_from_headers(
+ self, headers: Sequence[str], response_url: URL
+ ) -> None:
+ """Update cookies from raw Set-Cookie headers."""
+ if headers and (cookies_to_update := parse_set_cookie_headers(headers)):
+ self.update_cookies(cookies_to_update, response_url)
+
+ @abstractmethod
+ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
+ """Return the jar's cookies filtered by their attributes."""
+
+
+class AbstractStreamWriter(ABC):
+ """Abstract stream writer."""
+
+ buffer_size: int = 0
+ output_size: int = 0
+ length: Optional[int] = 0
+
+ @abstractmethod
+ async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
+ """Write chunk into stream."""
+
+ @abstractmethod
+ async def write_eof(self, chunk: bytes = b"") -> None:
+ """Write last chunk."""
+
+ @abstractmethod
+ async def drain(self) -> None:
+ """Flush the write buffer."""
+
+ @abstractmethod
+ def enable_compression(
+ self, encoding: str = "deflate", strategy: Optional[int] = None
+ ) -> None:
+ """Enable HTTP body compression"""
+
+ @abstractmethod
+ def enable_chunking(self) -> None:
+ """Enable HTTP chunked mode"""
+
+ @abstractmethod
+ async def write_headers(
+ self, status_line: str, headers: "CIMultiDict[str]"
+ ) -> None:
+ """Write HTTP headers"""
+
+ def send_headers(self) -> None:
+ """Force sending buffered headers if not already sent.
+
+ Required only if write_headers() buffers headers instead of sending immediately.
+ For backwards compatibility, this method does nothing by default.
+ """
+
+
+class AbstractAccessLogger(ABC):
+ """Abstract writer to access log."""
+
+ __slots__ = ("logger", "log_format")
+
+ def __init__(self, logger: logging.Logger, log_format: str) -> None:
+ self.logger = logger
+ self.log_format = log_format
+
+ @abstractmethod
+ def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+ """Emit log to logger."""
+
+ @property
+ def enabled(self) -> bool:
+ """Check if logger is enabled."""
+ return True
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/base_protocol.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/base_protocol.py"
new file mode 100644
index 0000000..b0a67ed
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/base_protocol.py"
@@ -0,0 +1,100 @@
+import asyncio
+from typing import Optional, cast
+
+from .client_exceptions import ClientConnectionResetError
+from .helpers import set_exception
+from .tcp_helpers import tcp_nodelay
+
+
+class BaseProtocol(asyncio.Protocol):
+    """Common base for aiohttp protocols.
+
+    Tracks the transport, write-side flow-control state (pause/resume
+    plus a drain waiter future), and read-side pause state.
+    """
+
+    __slots__ = (
+        "_loop",
+        "_paused",
+        "_drain_waiter",
+        # NOTE(review): "_connection_lost" is reserved in __slots__ but is
+        # never assigned in this class — presumably used by a subclass or a
+        # leftover from an older revision; confirm before removing.
+        "_connection_lost",
+        "_reading_paused",
+        "transport",
+    )
+
+    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+        self._loop: asyncio.AbstractEventLoop = loop
+        # True while the transport has paused us for write flow control.
+        self._paused = False
+        # Future awaited by _drain_helper(); resolved on resume/close.
+        self._drain_waiter: Optional[asyncio.Future[None]] = None
+        self._reading_paused = False
+
+        self.transport: Optional[asyncio.Transport] = None
+
+    @property
+    def connected(self) -> bool:
+        """Return True if the connection is open."""
+        return self.transport is not None
+
+    @property
+    def writing_paused(self) -> bool:
+        # True while the event loop has applied write back-pressure.
+        return self._paused
+
+    def pause_writing(self) -> None:
+        # Called by the transport when its write buffer passes the high
+        # watermark; must never be called twice in a row.
+        assert not self._paused
+        self._paused = True
+
+    def resume_writing(self) -> None:
+        assert self._paused
+        self._paused = False
+
+        # Wake any writer blocked in _drain_helper().
+        waiter = self._drain_waiter
+        if waiter is not None:
+            self._drain_waiter = None
+            if not waiter.done():
+                waiter.set_result(None)
+
+    def pause_reading(self) -> None:
+        if not self._reading_paused and self.transport is not None:
+            try:
+                self.transport.pause_reading()
+            except (AttributeError, NotImplementedError, RuntimeError):
+                # Some transports do not support pausing; we still record
+                # the requested state so resume_reading() stays symmetric.
+                pass
+            self._reading_paused = True
+
+    def resume_reading(self) -> None:
+        if self._reading_paused and self.transport is not None:
+            try:
+                self.transport.resume_reading()
+            except (AttributeError, NotImplementedError, RuntimeError):
+                pass
+            self._reading_paused = False
+
+    def connection_made(self, transport: asyncio.BaseTransport) -> None:
+        tr = cast(asyncio.Transport, transport)
+        # Disable Nagle's algorithm for latency-sensitive HTTP traffic.
+        tcp_nodelay(tr, True)
+        self.transport = tr
+
+    def connection_lost(self, exc: Optional[BaseException]) -> None:
+        # Wake up the writer if currently paused.
+        self.transport = None
+        if not self._paused:
+            return
+        waiter = self._drain_waiter
+        if waiter is None:
+            return
+        self._drain_waiter = None
+        if waiter.done():
+            return
+        if exc is None:
+            waiter.set_result(None)
+        else:
+            # Attach the original exception as the cause of the
+            # ConnectionError delivered to the waiting writer.
+            set_exception(
+                waiter,
+                ConnectionError("Connection lost"),
+                exc,
+            )
+
+    async def _drain_helper(self) -> None:
+        if self.transport is None:
+            raise ClientConnectionResetError("Connection lost")
+        if not self._paused:
+            return
+        waiter = self._drain_waiter
+        if waiter is None:
+            waiter = self._loop.create_future()
+            self._drain_waiter = waiter
+        # shield: presumably so that cancelling one drain() caller does not
+        # cancel the shared waiter for other writers — TODO confirm.
+        await asyncio.shield(waiter)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client.py"
new file mode 100644
index 0000000..bc4ee17
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client.py"
@@ -0,0 +1,1635 @@
+"""HTTP Client for asyncio."""
+
+import asyncio
+import base64
+import hashlib
+import json
+import os
+import sys
+import traceback
+import warnings
+from contextlib import suppress
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Coroutine,
+ Final,
+ FrozenSet,
+ Generator,
+ Generic,
+ Iterable,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ Type,
+ TypedDict,
+ TypeVar,
+ Union,
+)
+
+import attr
+from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
+from yarl import URL
+
+from . import hdrs, http, payload
+from ._websocket.reader import WebSocketDataQueue
+from .abc import AbstractCookieJar
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientConnectionResetError,
+ ClientConnectorCertificateError,
+ ClientConnectorDNSError,
+ ClientConnectorError,
+ ClientConnectorSSLError,
+ ClientError,
+ ClientHttpProxyError,
+ ClientOSError,
+ ClientPayloadError,
+ ClientProxyConnectionError,
+ ClientResponseError,
+ ClientSSLError,
+ ConnectionTimeoutError,
+ ContentTypeError,
+ InvalidURL,
+ InvalidUrlClientError,
+ InvalidUrlRedirectClientError,
+ NonHttpUrlClientError,
+ NonHttpUrlRedirectClientError,
+ RedirectClientError,
+ ServerConnectionError,
+ ServerDisconnectedError,
+ ServerFingerprintMismatch,
+ ServerTimeoutError,
+ SocketTimeoutError,
+ TooManyRedirects,
+ WSMessageTypeError,
+ WSServerHandshakeError,
+)
+from .client_middlewares import ClientMiddlewareType, build_client_middlewares
+from .client_reqrep import (
+ ClientRequest as ClientRequest,
+ ClientResponse as ClientResponse,
+ Fingerprint as Fingerprint,
+ RequestInfo as RequestInfo,
+ _merge_ssl_params,
+)
+from .client_ws import (
+ DEFAULT_WS_CLIENT_TIMEOUT,
+ ClientWebSocketResponse as ClientWebSocketResponse,
+ ClientWSTimeout as ClientWSTimeout,
+)
+from .connector import (
+ HTTP_AND_EMPTY_SCHEMA_SET,
+ BaseConnector as BaseConnector,
+ NamedPipeConnector as NamedPipeConnector,
+ TCPConnector as TCPConnector,
+ UnixConnector as UnixConnector,
+)
+from .cookiejar import CookieJar
+from .helpers import (
+ _SENTINEL,
+ DEBUG,
+ EMPTY_BODY_METHODS,
+ BasicAuth,
+ TimeoutHandle,
+ basicauth_from_netrc,
+ get_env_proxy_for_url,
+ netrc_from_env,
+ sentinel,
+ strip_auth_from_url,
+)
+from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
+from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse
+from .tracing import Trace, TraceConfig
+from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL
+
+# Public API of this module: re-exports the exception hierarchy, the
+# request/response classes, connectors, and the session itself.
+__all__ = (
+    # client_exceptions
+    "ClientConnectionError",
+    "ClientConnectionResetError",
+    "ClientConnectorCertificateError",
+    "ClientConnectorDNSError",
+    "ClientConnectorError",
+    "ClientConnectorSSLError",
+    "ClientError",
+    "ClientHttpProxyError",
+    "ClientOSError",
+    "ClientPayloadError",
+    "ClientProxyConnectionError",
+    "ClientResponseError",
+    "ClientSSLError",
+    "ConnectionTimeoutError",
+    "ContentTypeError",
+    "InvalidURL",
+    "InvalidUrlClientError",
+    "RedirectClientError",
+    "NonHttpUrlClientError",
+    "InvalidUrlRedirectClientError",
+    "NonHttpUrlRedirectClientError",
+    "ServerConnectionError",
+    "ServerDisconnectedError",
+    "ServerFingerprintMismatch",
+    "ServerTimeoutError",
+    "SocketTimeoutError",
+    "TooManyRedirects",
+    "WSServerHandshakeError",
+    # client_reqrep
+    "ClientRequest",
+    "ClientResponse",
+    "Fingerprint",
+    "RequestInfo",
+    # connector
+    "BaseConnector",
+    "TCPConnector",
+    "UnixConnector",
+    "NamedPipeConnector",
+    # client_ws
+    "ClientWebSocketResponse",
+    # client
+    "ClientSession",
+    "ClientTimeout",
+    "ClientWSTimeout",
+    "request",
+    "WSMessageTypeError",
+)
+
+
+# SSLContext is only needed for annotations; at runtime it is bound to
+# None so the ssl module is not imported eagerly.
+if TYPE_CHECKING:
+    from ssl import SSLContext
+else:
+    SSLContext = None
+
+# Unpack (used to type **kwargs of request()) exists from Python 3.11.
+if sys.version_info >= (3, 11) and TYPE_CHECKING:
+    from typing import Unpack
+
+
+class _RequestOptions(TypedDict, total=False):
+    """Typing-only description of the keyword arguments accepted by
+    ClientSession.request() and the HTTP-verb shortcuts (all optional).
+    """
+
+    params: Query
+    data: Any
+    json: Any
+    cookies: Union[LooseCookies, None]
+    headers: Union[LooseHeaders, None]
+    skip_auto_headers: Union[Iterable[str], None]
+    auth: Union[BasicAuth, None]
+    allow_redirects: bool
+    max_redirects: int
+    compress: Union[str, bool, None]
+    chunked: Union[bool, None]
+    expect100: bool
+    raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]]
+    read_until_eof: bool
+    proxy: Union[StrOrURL, None]
+    proxy_auth: Union[BasicAuth, None]
+    timeout: "Union[ClientTimeout, _SENTINEL, None]"
+    ssl: Union[SSLContext, bool, Fingerprint]
+    server_hostname: Union[str, None]
+    proxy_headers: Union[LooseHeaders, None]
+    trace_request_ctx: Union[Mapping[str, Any], None]
+    read_bufsize: Union[int, None]
+    auto_decompress: Union[bool, None]
+    max_line_size: Union[int, None]
+    max_field_size: Union[int, None]
+    middlewares: Optional[Sequence[ClientMiddlewareType]]
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ClientTimeout:
+    """Immutable timeout configuration for a request (all in seconds).
+
+    ``None`` disables the corresponding limit.
+    """
+
+    # Total time for the whole operation (connect + redirects + body read).
+    total: Optional[float] = None
+    # Time to acquire a connection from the pool (incl. establishing it).
+    connect: Optional[float] = None
+    # Max gap between data chunks while reading from the socket.
+    sock_read: Optional[float] = None
+    # Time for the low-level socket connect alone.
+    sock_connect: Optional[float] = None
+    # Values above this are ceiled to whole seconds for timer scheduling.
+    ceil_threshold: float = 5
+
+    # pool_queue_timeout: Optional[float] = None
+    # dns_resolution_timeout: Optional[float] = None
+    # socket_connect_timeout: Optional[float] = None
+    # connection_acquiring_timeout: Optional[float] = None
+    # new_connection_timeout: Optional[float] = None
+    # http_header_timeout: Optional[float] = None
+    # response_body_timeout: Optional[float] = None
+
+    # to create a timeout specific for a single request, either
+    # - create a completely new one to overwrite the default
+    # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
+    # to overwrite the defaults
+
+
+# 5 Minute default read timeout
+DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30)
+
+# Methods safe to retry once on a dropped persistent connection.
+# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2
+IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"})
+
+_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse)
+# Callable deciding the fallback charset from (response, raw body bytes).
+_CharsetResolver = Callable[[ClientResponse, bytes], str]
+
+
+class ClientSession:
+ """First-class interface for making HTTP requests."""
+
+    # Whitelist of attribute names that may be set on a session instance;
+    # consulted by the DEBUG-only __setattr__ below to warn on custom
+    # (likely accidental) attributes.
+    ATTRS = frozenset(
+        [
+            "_base_url",
+            "_base_url_origin",
+            "_source_traceback",
+            "_connector",
+            "_loop",
+            "_cookie_jar",
+            "_connector_owner",
+            "_default_auth",
+            "_version",
+            "_json_serialize",
+            "_requote_redirect_url",
+            "_timeout",
+            "_raise_for_status",
+            "_auto_decompress",
+            "_trust_env",
+            "_default_headers",
+            "_skip_auto_headers",
+            "_request_class",
+            "_response_class",
+            "_ws_response_class",
+            "_trace_configs",
+            "_read_bufsize",
+            "_max_line_size",
+            "_max_field_size",
+            "_resolve_charset",
+            "_default_proxy",
+            "_default_proxy_auth",
+            "_retry_connection",
+            "_middlewares",
+            "requote_redirect_url",
+        ]
+    )
+
+    # Class-level defaults so __del__ is safe even if __init__ failed early.
+    _source_traceback: Optional[traceback.StackSummary] = None
+    _connector: Optional[BaseConnector] = None
+
+    def __init__(
+        self,
+        base_url: Optional[StrOrURL] = None,
+        *,
+        connector: Optional[BaseConnector] = None,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        cookies: Optional[LooseCookies] = None,
+        headers: Optional[LooseHeaders] = None,
+        proxy: Optional[StrOrURL] = None,
+        proxy_auth: Optional[BasicAuth] = None,
+        skip_auto_headers: Optional[Iterable[str]] = None,
+        auth: Optional[BasicAuth] = None,
+        json_serialize: JSONEncoder = json.dumps,
+        request_class: Type[ClientRequest] = ClientRequest,
+        response_class: Type[ClientResponse] = ClientResponse,
+        ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
+        version: HttpVersion = http.HttpVersion11,
+        cookie_jar: Optional[AbstractCookieJar] = None,
+        connector_owner: bool = True,
+        raise_for_status: Union[
+            bool, Callable[[ClientResponse], Awaitable[None]]
+        ] = False,
+        read_timeout: Union[float, _SENTINEL] = sentinel,
+        conn_timeout: Optional[float] = None,
+        timeout: Union[object, ClientTimeout] = sentinel,
+        auto_decompress: bool = True,
+        trust_env: bool = False,
+        requote_redirect_url: bool = True,
+        trace_configs: Optional[List[TraceConfig]] = None,
+        read_bufsize: int = 2**16,
+        max_line_size: int = 8190,
+        max_field_size: int = 8190,
+        fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
+        middlewares: Sequence[ClientMiddlewareType] = (),
+        ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel,
+    ) -> None:
+        """Initialise the session; see class docstring for usage."""
+        # We initialise _connector to None immediately, as it's referenced in __del__()
+        # and could cause issues if an exception occurs during initialisation.
+        self._connector: Optional[BaseConnector] = None
+
+        # Prefer the connector's loop when no loop was given explicitly.
+        if loop is None:
+            if connector is not None:
+                loop = connector._loop
+
+        loop = loop or asyncio.get_running_loop()
+
+        if base_url is None or isinstance(base_url, URL):
+            self._base_url: Optional[URL] = base_url
+            self._base_url_origin = None if base_url is None else base_url.origin()
+        else:
+            self._base_url = URL(base_url)
+            self._base_url_origin = self._base_url.origin()
+            # NOTE: only the str branch asserts absoluteness; a URL instance
+            # is accepted as-is (upstream behaviour).
+            assert self._base_url.absolute, "Only absolute URLs are supported"
+        # A trailing '/' is required so URL.join() treats the last path
+        # segment as a directory rather than replacing it.
+        if self._base_url is not None and not self._base_url.path.endswith("/"):
+            raise ValueError("base_url must have a trailing '/'")
+
+        if timeout is sentinel or timeout is None:
+            self._timeout = DEFAULT_TIMEOUT
+            # Legacy read_timeout/conn_timeout are folded into ClientTimeout
+            # with deprecation warnings.
+            if read_timeout is not sentinel:
+                warnings.warn(
+                    "read_timeout is deprecated, use timeout argument instead",
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+                self._timeout = attr.evolve(self._timeout, total=read_timeout)
+            if conn_timeout is not None:
+                self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
+                warnings.warn(
+                    "conn_timeout is deprecated, use timeout argument instead",
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+        else:
+            if not isinstance(timeout, ClientTimeout):
+                raise ValueError(
+                    f"timeout parameter cannot be of {type(timeout)} type, "
+                    "please use 'timeout=ClientTimeout(...)'",
+                )
+            self._timeout = timeout
+            # Mixing the new and legacy timeout arguments is an error.
+            if read_timeout is not sentinel:
+                raise ValueError(
+                    "read_timeout and timeout parameters "
+                    "conflict, please setup "
+                    "timeout.read"
+                )
+            if conn_timeout is not None:
+                raise ValueError(
+                    "conn_timeout and timeout parameters "
+                    "conflict, please setup "
+                    "timeout.connect"
+                )
+
+        if ssl_shutdown_timeout is not sentinel:
+            warnings.warn(
+                "The ssl_shutdown_timeout parameter is deprecated and will be removed in aiohttp 4.0",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        if connector is None:
+            connector = TCPConnector(
+                loop=loop, ssl_shutdown_timeout=ssl_shutdown_timeout
+            )
+
+        if connector._loop is not loop:
+            raise RuntimeError("Session and connector has to use same event loop")
+
+        self._loop = loop
+
+        # In debug mode remember where the session was created, for the
+        # "Unclosed client session" diagnostics in __del__.
+        if loop.get_debug():
+            self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+        if cookie_jar is None:
+            cookie_jar = CookieJar(loop=loop)
+        self._cookie_jar = cookie_jar
+
+        if cookies:
+            self._cookie_jar.update_cookies(cookies)
+
+        self._connector = connector
+        self._connector_owner = connector_owner
+        self._default_auth = auth
+        self._version = version
+        self._json_serialize = json_serialize
+        self._raise_for_status = raise_for_status
+        self._auto_decompress = auto_decompress
+        self._trust_env = trust_env
+        self._requote_redirect_url = requote_redirect_url
+        self._read_bufsize = read_bufsize
+        self._max_line_size = max_line_size
+        self._max_field_size = max_field_size
+
+        # Convert to list of tuples
+        if headers:
+            real_headers: CIMultiDict[str] = CIMultiDict(headers)
+        else:
+            real_headers = CIMultiDict()
+        self._default_headers: CIMultiDict[str] = real_headers
+        if skip_auto_headers is not None:
+            self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
+        else:
+            self._skip_auto_headers = frozenset()
+
+        self._request_class = request_class
+        self._response_class = response_class
+        self._ws_response_class = ws_response_class
+
+        self._trace_configs = trace_configs or []
+        for trace_config in self._trace_configs:
+            trace_config.freeze()
+
+        self._resolve_charset = fallback_charset_resolver
+
+        self._default_proxy = proxy
+        self._default_proxy_auth = proxy_auth
+        self._retry_connection: bool = True
+        self._middlewares = middlewares
+
+    def __init_subclass__(cls: Type["ClientSession"]) -> None:
+        # Subclassing ClientSession is deprecated; warn at class-definition
+        # time rather than at instantiation.
+        warnings.warn(
+            "Inheritance class {} from ClientSession "
+            "is discouraged".format(cls.__name__),
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+    # Only installed when aiohttp runs in debug mode: warns about setting
+    # attributes outside the ATTRS whitelist, then delegates to the normal
+    # attribute machinery (the assignment still happens).
+    if DEBUG:
+
+        def __setattr__(self, name: str, val: Any) -> None:
+            if name not in self.ATTRS:
+                warnings.warn(
+                    "Setting custom ClientSession.{} attribute "
+                    "is discouraged".format(name),
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+            super().__setattr__(name, val)
+
+    # _warnings is bound as a default so the module is still reachable
+    # during interpreter shutdown, when globals may already be cleared.
+    def __del__(self, _warnings: Any = warnings) -> None:
+        if not self.closed:
+            kwargs = {"source": self}
+            _warnings.warn(
+                f"Unclosed client session {self!r}", ResourceWarning, **kwargs
+            )
+            context = {"client_session": self, "message": "Unclosed client session"}
+            if self._source_traceback is not None:
+                context["source_traceback"] = self._source_traceback
+            self._loop.call_exception_handler(context)
+
+    # For type checkers on Python 3.11+ expose a fully typed signature via
+    # Unpack[_RequestOptions]; at runtime only the **kwargs variant exists.
+    if sys.version_info >= (3, 11) and TYPE_CHECKING:
+
+        def request(
+            self,
+            method: str,
+            url: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> "_RequestContextManager": ...
+
+    else:
+
+        def request(
+            self, method: str, url: StrOrURL, **kwargs: Any
+        ) -> "_RequestContextManager":
+            """Perform HTTP request."""
+            # Returns a context manager wrapping the _request coroutine so
+            # callers can use both `await` and `async with`.
+            return _RequestContextManager(self._request(method, url, **kwargs))
+
+    def _build_url(self, str_or_url: StrOrURL) -> URL:
+        """Return the target URL, resolving relative URLs against base_url.
+
+        Absolute URLs are returned as-is even when a base_url is set.
+        May raise ValueError for unparseable input (propagated from URL()).
+        """
+        url = URL(str_or_url)
+        if self._base_url and not url.absolute:
+            return self._base_url.join(url)
+        return url
+
+    async def _request(
+        self,
+        method: str,
+        str_or_url: StrOrURL,
+        *,
+        params: Query = None,
+        data: Any = None,
+        json: Any = None,
+        cookies: Optional[LooseCookies] = None,
+        headers: Optional[LooseHeaders] = None,
+        skip_auto_headers: Optional[Iterable[str]] = None,
+        auth: Optional[BasicAuth] = None,
+        allow_redirects: bool = True,
+        max_redirects: int = 10,
+        compress: Union[str, bool, None] = None,
+        chunked: Optional[bool] = None,
+        expect100: bool = False,
+        raise_for_status: Union[
+            None, bool, Callable[[ClientResponse], Awaitable[None]]
+        ] = None,
+        read_until_eof: bool = True,
+        proxy: Optional[StrOrURL] = None,
+        proxy_auth: Optional[BasicAuth] = None,
+        timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
+        verify_ssl: Optional[bool] = None,
+        fingerprint: Optional[bytes] = None,
+        ssl_context: Optional[SSLContext] = None,
+        ssl: Union[SSLContext, bool, Fingerprint] = True,
+        server_hostname: Optional[str] = None,
+        proxy_headers: Optional[LooseHeaders] = None,
+        trace_request_ctx: Optional[Mapping[str, Any]] = None,
+        read_bufsize: Optional[int] = None,
+        auto_decompress: Optional[bool] = None,
+        max_line_size: Optional[int] = None,
+        max_field_size: Optional[int] = None,
+        middlewares: Optional[Sequence[ClientMiddlewareType]] = None,
+    ) -> ClientResponse:
+        """Core request driver behind request()/get()/post()/etc.
+
+        Resolves auth/proxy/timeout defaults, then loops: build a
+        ClientRequest, send it (optionally through client middlewares),
+        and either follow a redirect or return the ClientResponse.
+        """
+
+        # NOTE: timeout clamps existing connect and read timeouts. We cannot
+        # set the default to None because we need to detect if the user wants
+        # to use the existing timeouts by setting timeout to None.
+
+        if self.closed:
+            raise RuntimeError("Session is closed")
+
+        # Fold the deprecated verify_ssl/ssl_context/fingerprint args into
+        # the single `ssl` value.
+        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+
+        if data is not None and json is not None:
+            raise ValueError(
+                "data and json parameters can not be used at the same time"
+            )
+        elif json is not None:
+            data = payload.JsonPayload(json, dumps=self._json_serialize)
+
+        if not isinstance(chunked, bool) and chunked is not None:
+            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
+
+        redirects = 0
+        history: List[ClientResponse] = []
+        version = self._version
+        params = params or {}
+
+        # Merge with default headers and transform to CIMultiDict
+        headers = self._prepare_headers(headers)
+
+        try:
+            url = self._build_url(str_or_url)
+        except ValueError as e:
+            raise InvalidUrlClientError(str_or_url) from e
+
+        assert self._connector is not None
+        if url.scheme not in self._connector.allowed_protocol_schema_set:
+            raise NonHttpUrlClientError(url)
+
+        # Per-request skip list is merged with the session-wide one.
+        skip_headers: Optional[Iterable[istr]]
+        if skip_auto_headers is not None:
+            skip_headers = {
+                istr(i) for i in skip_auto_headers
+            } | self._skip_auto_headers
+        elif self._skip_auto_headers:
+            skip_headers = self._skip_auto_headers
+        else:
+            skip_headers = None
+
+        if proxy is None:
+            proxy = self._default_proxy
+        if proxy_auth is None:
+            proxy_auth = self._default_proxy_auth
+
+        # proxy_headers are only meaningful when a proxy is in use.
+        if proxy is None:
+            proxy_headers = None
+        else:
+            proxy_headers = self._prepare_headers(proxy_headers)
+            try:
+                proxy = URL(proxy)
+            except ValueError as e:
+                raise InvalidURL(proxy) from e
+
+        if timeout is sentinel:
+            real_timeout: ClientTimeout = self._timeout
+        else:
+            if not isinstance(timeout, ClientTimeout):
+                # Bare number accepted for backwards compatibility.
+                real_timeout = ClientTimeout(total=timeout)
+            else:
+                real_timeout = timeout
+        # timeout is cumulative for all request operations
+        # (request, redirects, responses, data consuming)
+        tm = TimeoutHandle(
+            self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
+        )
+        handle = tm.start()
+
+        if read_bufsize is None:
+            read_bufsize = self._read_bufsize
+
+        if auto_decompress is None:
+            auto_decompress = self._auto_decompress
+
+        if max_line_size is None:
+            max_line_size = self._max_line_size
+
+        if max_field_size is None:
+            max_field_size = self._max_field_size
+
+        traces = [
+            Trace(
+                self,
+                trace_config,
+                trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
+            )
+            for trace_config in self._trace_configs
+        ]
+
+        for trace in traces:
+            await trace.send_request_start(method, url.update_query(params), headers)
+
+        timer = tm.timer()
+        try:
+            with timer:
+                # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests
+                retry_persistent_connection = (
+                    self._retry_connection and method in IDEMPOTENT_METHODS
+                )
+                # One iteration per attempt: first try, retry after a dropped
+                # keep-alive connection, or each followed redirect.
+                while True:
+                    url, auth_from_url = strip_auth_from_url(url)
+                    if not url.raw_host:
+                        # NOTE: Bail early, otherwise, causes `InvalidURL` through
+                        # NOTE: `self._request_class()` below.
+                        err_exc_cls = (
+                            InvalidUrlRedirectClientError
+                            if redirects
+                            else InvalidUrlClientError
+                        )
+                        raise err_exc_cls(url)
+                    # If `auth` was passed for an already authenticated URL,
+                    # disallow only if this is the initial URL; this is to avoid issues
+                    # with sketchy redirects that are not the caller's responsibility
+                    if not history and (auth and auth_from_url):
+                        raise ValueError(
+                            "Cannot combine AUTH argument with "
+                            "credentials encoded in URL"
+                        )
+
+                    # Override the auth with the one from the URL only if we
+                    # have no auth, or if we got an auth from a redirect URL
+                    if auth is None or (history and auth_from_url is not None):
+                        auth = auth_from_url
+
+                    # Session-level default auth applies only to the base_url
+                    # origin (or everywhere when no base_url is set).
+                    if (
+                        auth is None
+                        and self._default_auth
+                        and (
+                            not self._base_url or self._base_url_origin == url.origin()
+                        )
+                    ):
+                        auth = self._default_auth
+
+                    # Try netrc if auth is still None and trust_env is enabled.
+                    if auth is None and self._trust_env and url.host is not None:
+                        auth = await self._loop.run_in_executor(
+                            None, self._get_netrc_auth, url.host
+                        )
+
+                    # It would be confusing if we support explicit
+                    # Authorization header with auth argument
+                    if (
+                        headers is not None
+                        and auth is not None
+                        and hdrs.AUTHORIZATION in headers
+                    ):
+                        raise ValueError(
+                            "Cannot combine AUTHORIZATION header "
+                            "with AUTH argument or credentials "
+                            "encoded in URL"
+                        )
+
+                    all_cookies = self._cookie_jar.filter_cookies(url)
+
+                    # Per-request cookies pass through a throwaway jar so
+                    # domain/path filtering rules still apply.
+                    if cookies is not None:
+                        tmp_cookie_jar = CookieJar(
+                            quote_cookie=self._cookie_jar.quote_cookie
+                        )
+                        tmp_cookie_jar.update_cookies(cookies)
+                        req_cookies = tmp_cookie_jar.filter_cookies(url)
+                        if req_cookies:
+                            all_cookies.load(req_cookies)
+
+                    proxy_: Optional[URL] = None
+                    if proxy is not None:
+                        proxy_ = URL(proxy)
+                    elif self._trust_env:
+                        with suppress(LookupError):
+                            proxy_, proxy_auth = await asyncio.to_thread(
+                                get_env_proxy_for_url, url
+                            )
+
+                    req = self._request_class(
+                        method,
+                        url,
+                        params=params,
+                        headers=headers,
+                        skip_auto_headers=skip_headers,
+                        data=data,
+                        cookies=all_cookies,
+                        auth=auth,
+                        version=version,
+                        compress=compress,
+                        chunked=chunked,
+                        expect100=expect100,
+                        loop=self._loop,
+                        response_class=self._response_class,
+                        proxy=proxy_,
+                        proxy_auth=proxy_auth,
+                        timer=timer,
+                        session=self,
+                        ssl=ssl if ssl is not None else True,
+                        server_hostname=server_hostname,
+                        proxy_headers=proxy_headers,
+                        traces=traces,
+                        trust_env=self.trust_env,
+                    )
+
+                    # Innermost handler: acquire a connection, send the
+                    # request, start reading the response. Middlewares wrap
+                    # around this.
+                    async def _connect_and_send_request(
+                        req: ClientRequest,
+                    ) -> ClientResponse:
+                        # connection timeout
+                        assert self._connector is not None
+                        try:
+                            conn = await self._connector.connect(
+                                req, traces=traces, timeout=real_timeout
+                            )
+                        except asyncio.TimeoutError as exc:
+                            raise ConnectionTimeoutError(
+                                f"Connection timeout to host {req.url}"
+                            ) from exc
+
+                        assert conn.protocol is not None
+                        conn.protocol.set_response_params(
+                            timer=timer,
+                            skip_payload=req.method in EMPTY_BODY_METHODS,
+                            read_until_eof=read_until_eof,
+                            auto_decompress=auto_decompress,
+                            read_timeout=real_timeout.sock_read,
+                            read_bufsize=read_bufsize,
+                            timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
+                            max_line_size=max_line_size,
+                            max_field_size=max_field_size,
+                        )
+                        # Close response/connection on any failure so the
+                        # pooled connection is never leaked half-used.
+                        try:
+                            resp = await req.send(conn)
+                            try:
+                                await resp.start(conn)
+                            except BaseException:
+                                resp.close()
+                                raise
+                        except BaseException:
+                            conn.close()
+                            raise
+                        return resp
+
+                    # Apply middleware (if any) - per-request middleware overrides session middleware
+                    effective_middlewares = (
+                        self._middlewares if middlewares is None else middlewares
+                    )
+
+                    if effective_middlewares:
+                        handler = build_client_middlewares(
+                            _connect_and_send_request, effective_middlewares
+                        )
+                    else:
+                        handler = _connect_and_send_request
+
+                    try:
+                        resp = await handler(req)
+                    # Client connector errors should not be retried
+                    except (
+                        ConnectionTimeoutError,
+                        ClientConnectorError,
+                        ClientConnectorCertificateError,
+                        ClientConnectorSSLError,
+                    ):
+                        raise
+                    except (ClientOSError, ServerDisconnectedError):
+                        # A stale keep-alive connection may die on first use;
+                        # retry idempotent methods exactly once.
+                        if retry_persistent_connection:
+                            retry_persistent_connection = False
+                            continue
+                        raise
+                    except ClientError:
+                        raise
+                    except OSError as exc:
+                        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                            raise
+                        raise ClientOSError(*exc.args) from exc
+
+                    # Update cookies from raw headers to preserve duplicates
+                    if resp._raw_cookie_headers:
+                        self._cookie_jar.update_cookies_from_headers(
+                            resp._raw_cookie_headers, resp.url
+                        )
+
+                    # redirects
+                    if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
+
+                        for trace in traces:
+                            await trace.send_request_redirect(
+                                method, url.update_query(params), headers, resp
+                            )
+
+                        redirects += 1
+                        history.append(resp)
+                        if max_redirects and redirects >= max_redirects:
+                            if req._body is not None:
+                                await req._body.close()
+                            resp.close()
+                            raise TooManyRedirects(
+                                history[0].request_info, tuple(history)
+                            )
+
+                        # For 301 and 302, mimic IE, now changed in RFC
+                        # https://github.com/kennethreitz/requests/pull/269
+                        if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
+                            resp.status in (301, 302) and resp.method == hdrs.METH_POST
+                        ):
+                            method = hdrs.METH_GET
+                            data = None
+                            if headers.get(hdrs.CONTENT_LENGTH):
+                                headers.pop(hdrs.CONTENT_LENGTH)
+                        else:
+                            # For 307/308, always preserve the request body
+                            # For 301/302 with non-POST methods, preserve the request body
+                            # https://www.rfc-editor.org/rfc/rfc9110#section-15.4.3-3.1
+                            # Use the existing payload to avoid recreating it from a potentially consumed file
+                            data = req._body
+
+                        r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
+                            hdrs.URI
+                        )
+                        if r_url is None:
+                            # see github.com/aio-libs/aiohttp/issues/2022
+                            break
+                        else:
+                            # reading from correct redirection
+                            # response is forbidden
+                            resp.release()
+
+                        try:
+                            parsed_redirect_url = URL(
+                                r_url, encoded=not self._requote_redirect_url
+                            )
+                        except ValueError as e:
+                            if req._body is not None:
+                                await req._body.close()
+                            resp.close()
+                            raise InvalidUrlRedirectClientError(
+                                r_url,
+                                "Server attempted redirecting to a location that does not look like a URL",
+                            ) from e
+
+                        scheme = parsed_redirect_url.scheme
+                        if scheme not in HTTP_AND_EMPTY_SCHEMA_SET:
+                            if req._body is not None:
+                                await req._body.close()
+                            resp.close()
+                            raise NonHttpUrlRedirectClientError(r_url)
+                        elif not scheme:
+                            parsed_redirect_url = url.join(parsed_redirect_url)
+
+                        try:
+                            redirect_origin = parsed_redirect_url.origin()
+                        except ValueError as origin_val_err:
+                            if req._body is not None:
+                                await req._body.close()
+                            resp.close()
+                            raise InvalidUrlRedirectClientError(
+                                parsed_redirect_url,
+                                "Invalid redirect URL origin",
+                            ) from origin_val_err
+
+                        # Drop credentials when the redirect leaves the
+                        # original origin, so they are not leaked cross-site.
+                        if url.origin() != redirect_origin:
+                            auth = None
+                            headers.pop(hdrs.AUTHORIZATION, None)
+
+                        url = parsed_redirect_url
+                        params = {}
+                        resp.release()
+                        continue
+
+                    break
+
+                if req._body is not None:
+                    await req._body.close()
+                # check response status
+                if raise_for_status is None:
+                    raise_for_status = self._raise_for_status
+
+                if raise_for_status is None:
+                    pass
+                elif callable(raise_for_status):
+                    await raise_for_status(resp)
+                elif raise_for_status:
+                    resp.raise_for_status()
+
+                # register connection
+                if handle is not None:
+                    if resp.connection is not None:
+                        resp.connection.add_callback(handle.cancel)
+                    else:
+                        handle.cancel()
+
+                resp._history = tuple(history)
+
+                for trace in traces:
+                    await trace.send_request_end(
+                        method, url.update_query(params), headers, resp
+                    )
+                return resp
+
+        except BaseException as e:
+            # cleanup timer
+            tm.close()
+            if handle:
+                handle.cancel()
+                handle = None
+
+            for trace in traces:
+                await trace.send_request_exception(
+                    method, url.update_query(params), headers, e
+                )
+            raise
+
+    def ws_connect(
+        self,
+        url: StrOrURL,
+        *,
+        method: str = hdrs.METH_GET,
+        protocols: Iterable[str] = (),
+        timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
+        receive_timeout: Optional[float] = None,
+        autoclose: bool = True,
+        autoping: bool = True,
+        heartbeat: Optional[float] = None,
+        auth: Optional[BasicAuth] = None,
+        origin: Optional[str] = None,
+        params: Query = None,
+        headers: Optional[LooseHeaders] = None,
+        proxy: Optional[StrOrURL] = None,
+        proxy_auth: Optional[BasicAuth] = None,
+        ssl: Union[SSLContext, bool, Fingerprint] = True,
+        verify_ssl: Optional[bool] = None,
+        fingerprint: Optional[bytes] = None,
+        ssl_context: Optional[SSLContext] = None,
+        server_hostname: Optional[str] = None,
+        proxy_headers: Optional[LooseHeaders] = None,
+        compress: int = 0,
+        max_msg_size: int = 4 * 1024 * 1024,
+    ) -> "_WSRequestContextManager":
+        """Initiate websocket connection.
+
+        Thin synchronous wrapper: forwards every argument to _ws_connect()
+        and wraps the coroutine in a context manager so callers can use
+        both `await` and `async with`.
+        """
+        return _WSRequestContextManager(
+            self._ws_connect(
+                url,
+                method=method,
+                protocols=protocols,
+                timeout=timeout,
+                receive_timeout=receive_timeout,
+                autoclose=autoclose,
+                autoping=autoping,
+                heartbeat=heartbeat,
+                auth=auth,
+                origin=origin,
+                params=params,
+                headers=headers,
+                proxy=proxy,
+                proxy_auth=proxy_auth,
+                ssl=ssl,
+                verify_ssl=verify_ssl,
+                fingerprint=fingerprint,
+                ssl_context=ssl_context,
+                server_hostname=server_hostname,
+                proxy_headers=proxy_headers,
+                compress=compress,
+                max_msg_size=max_msg_size,
+            )
+        )
+
+ async def _ws_connect(
+ self,
+ url: StrOrURL,
+ *,
+ method: str = hdrs.METH_GET,
+ protocols: Iterable[str] = (),
+ timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
+ receive_timeout: Optional[float] = None,
+ autoclose: bool = True,
+ autoping: bool = True,
+ heartbeat: Optional[float] = None,
+ auth: Optional[BasicAuth] = None,
+ origin: Optional[str] = None,
+ params: Query = None,
+ headers: Optional[LooseHeaders] = None,
+ proxy: Optional[StrOrURL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
+ verify_ssl: Optional[bool] = None,
+ fingerprint: Optional[bytes] = None,
+ ssl_context: Optional[SSLContext] = None,
+ server_hostname: Optional[str] = None,
+ proxy_headers: Optional[LooseHeaders] = None,
+ compress: int = 0,
+ max_msg_size: int = 4 * 1024 * 1024,
+ ) -> ClientWebSocketResponse:
+ if timeout is not sentinel:
+ if isinstance(timeout, ClientWSTimeout):
+ ws_timeout = timeout
+ else:
+ warnings.warn(
+ "parameter 'timeout' of type 'float' "
+ "is deprecated, please use "
+ "'timeout=ClientWSTimeout(ws_close=...)'",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ ws_timeout = ClientWSTimeout(ws_close=timeout)
+ else:
+ ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
+ if receive_timeout is not None:
+ warnings.warn(
+ "float parameter 'receive_timeout' "
+ "is deprecated, please use parameter "
+ "'timeout=ClientWSTimeout(ws_receive=...)'",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout)
+
+ if headers is None:
+ real_headers: CIMultiDict[str] = CIMultiDict()
+ else:
+ real_headers = CIMultiDict(headers)
+
+ default_headers = {
+ hdrs.UPGRADE: "websocket",
+ hdrs.CONNECTION: "Upgrade",
+ hdrs.SEC_WEBSOCKET_VERSION: "13",
+ }
+
+ for key, value in default_headers.items():
+ real_headers.setdefault(key, value)
+
+ sec_key = base64.b64encode(os.urandom(16))
+ real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
+
+ if protocols:
+ real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
+ if origin is not None:
+ real_headers[hdrs.ORIGIN] = origin
+ if compress:
+ extstr = ws_ext_gen(compress=compress)
+ real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
+
+ # For the sake of backward compatibility, if user passes in None, convert it to True
+ if ssl is None:
+ warnings.warn(
+ "ssl=None is deprecated, please use ssl=True",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ ssl = True
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+
+ # send request
+ resp = await self.request(
+ method,
+ url,
+ params=params,
+ headers=real_headers,
+ read_until_eof=False,
+ auth=auth,
+ proxy=proxy,
+ proxy_auth=proxy_auth,
+ ssl=ssl,
+ server_hostname=server_hostname,
+ proxy_headers=proxy_headers,
+ )
+
+ try:
+ # check handshake
+ if resp.status != 101:
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid response status",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid upgrade header",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid connection header",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ # key calculation
+ r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
+ match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
+ if r_key != match:
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid challenge response",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ # websocket protocol
+ protocol = None
+ if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
+ resp_protocols = [
+ proto.strip()
+ for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
+ ]
+
+ for proto in resp_protocols:
+ if proto in protocols:
+ protocol = proto
+ break
+
+ # websocket compress
+ notakeover = False
+ if compress:
+ compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
+ if compress_hdrs:
+ try:
+ compress, notakeover = ws_ext_parse(compress_hdrs)
+ except WSHandshakeError as exc:
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message=exc.args[0],
+ status=resp.status,
+ headers=resp.headers,
+ ) from exc
+ else:
+ compress = 0
+ notakeover = False
+
+ conn = resp.connection
+ assert conn is not None
+ conn_proto = conn.protocol
+ assert conn_proto is not None
+
+ # For WS connection the read_timeout must be either receive_timeout or greater
+ # None == no timeout, i.e. infinite timeout, so None is the max timeout possible
+ if ws_timeout.ws_receive is None:
+ # Reset regardless
+ conn_proto.read_timeout = None
+ elif conn_proto.read_timeout is not None:
+ conn_proto.read_timeout = max(
+ ws_timeout.ws_receive, conn_proto.read_timeout
+ )
+
+ transport = conn.transport
+ assert transport is not None
+ reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop)
+ conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
+ writer = WebSocketWriter(
+ conn_proto,
+ transport,
+ use_mask=True,
+ compress=compress,
+ notakeover=notakeover,
+ )
+ except BaseException:
+ resp.close()
+ raise
+ else:
+ return self._ws_response_class(
+ reader,
+ writer,
+ protocol,
+ resp,
+ ws_timeout,
+ autoclose,
+ autoping,
+ self._loop,
+ heartbeat=heartbeat,
+ compress=compress,
+ client_notakeover=notakeover,
+ )
+
+ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
+ """Add default headers and transform it to CIMultiDict"""
+ # Convert headers to MultiDict
+ result = CIMultiDict(self._default_headers)
+ if headers:
+ if not isinstance(headers, (MultiDictProxy, MultiDict)):
+ headers = CIMultiDict(headers)
+ added_names: Set[str] = set()
+ for key, value in headers.items():
+ if key in added_names:
+ result.add(key, value)
+ else:
+ result[key] = value
+ added_names.add(key)
+ return result
+
+ def _get_netrc_auth(self, host: str) -> Optional[BasicAuth]:
+ """
+ Get auth from netrc for the given host.
+
+ This method is designed to be called in an executor to avoid
+ blocking I/O in the event loop.
+ """
+ netrc_obj = netrc_from_env()
+ try:
+ return basicauth_from_netrc(netrc_obj, host)
+ except LookupError:
+ return None
+
    # The overload stubs below exist only for static type checking on
    # Python 3.11+ (they rely on ``Unpack``). At runtime ``TYPE_CHECKING``
    # is False, so every interpreter executes the plain implementations in
    # the ``else`` branch.
    if sys.version_info >= (3, 11) and TYPE_CHECKING:

        def get(
            self,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

        def options(
            self,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

        def head(
            self,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

        def post(
            self,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

        def put(
            self,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

        def patch(
            self,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

        def delete(
            self,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

    else:
        # Runtime implementations: thin wrappers that dispatch to
        # ``self._request`` with the appropriate HTTP method constant and
        # wrap the coroutine so callers can either await it or use it as
        # an ``async with`` context manager.

        def get(
            self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
        ) -> "_RequestContextManager":
            """Perform HTTP GET request."""
            return _RequestContextManager(
                self._request(
                    hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs
                )
            )

        def options(
            self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
        ) -> "_RequestContextManager":
            """Perform HTTP OPTIONS request."""
            return _RequestContextManager(
                self._request(
                    hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
                )
            )

        def head(
            self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
        ) -> "_RequestContextManager":
            """Perform HTTP HEAD request."""
            # Note: unlike GET/OPTIONS, HEAD does not follow redirects by default.
            return _RequestContextManager(
                self._request(
                    hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
                )
            )

        def post(
            self, url: StrOrURL, *, data: Any = None, **kwargs: Any
        ) -> "_RequestContextManager":
            """Perform HTTP POST request."""
            return _RequestContextManager(
                self._request(hdrs.METH_POST, url, data=data, **kwargs)
            )

        def put(
            self, url: StrOrURL, *, data: Any = None, **kwargs: Any
        ) -> "_RequestContextManager":
            """Perform HTTP PUT request."""
            return _RequestContextManager(
                self._request(hdrs.METH_PUT, url, data=data, **kwargs)
            )

        def patch(
            self, url: StrOrURL, *, data: Any = None, **kwargs: Any
        ) -> "_RequestContextManager":
            """Perform HTTP PATCH request."""
            return _RequestContextManager(
                self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
            )

        def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
            """Perform HTTP DELETE request."""
            return _RequestContextManager(
                self._request(hdrs.METH_DELETE, url, **kwargs)
            )
+
+ async def close(self) -> None:
+ """Close underlying connector.
+
+ Release all acquired resources.
+ """
+ if not self.closed:
+ if self._connector is not None and self._connector_owner:
+ await self._connector.close()
+ self._connector = None
+
+ @property
+ def closed(self) -> bool:
+ """Is client session closed.
+
+ A readonly property.
+ """
+ return self._connector is None or self._connector.closed
+
    @property
    def connector(self) -> Optional[BaseConnector]:
        """Connector instance used for the session (None after close/detach)."""
        return self._connector

    @property
    def cookie_jar(self) -> AbstractCookieJar:
        """The session cookies."""
        return self._cookie_jar

    @property
    def version(self) -> Tuple[int, int]:
        """The session HTTP protocol version."""
        return self._version

    @property
    def requote_redirect_url(self) -> bool:
        """Do URL requoting on redirection handling."""
        return self._requote_redirect_url

    @requote_redirect_url.setter
    def requote_redirect_url(self, val: bool) -> None:
        """Do URL requoting on redirection handling."""
        # Mutating this after session creation is deprecated (aiohttp #2778);
        # the setter still takes effect but emits a DeprecationWarning.
        warnings.warn(
            "session.requote_redirect_url modification is deprecated #2778",
            DeprecationWarning,
            stacklevel=2,
        )
        self._requote_redirect_url = val

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Session's loop."""
        # Deprecated accessor: still returns the loop, but warns.
        warnings.warn(
            "client.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop
+
    @property
    def timeout(self) -> ClientTimeout:
        """Timeout for the session."""
        return self._timeout

    @property
    def headers(self) -> "CIMultiDict[str]":
        """The default headers of the client session."""
        return self._default_headers

    @property
    def skip_auto_headers(self) -> FrozenSet[istr]:
        """Headers for which autogeneration should be skipped"""
        return self._skip_auto_headers

    @property
    def auth(self) -> Optional[BasicAuth]:
        """An object that represents HTTP Basic Authorization"""
        return self._default_auth

    @property
    def json_serialize(self) -> JSONEncoder:
        """Json serializer callable"""
        return self._json_serialize

    @property
    def connector_owner(self) -> bool:
        """Should connector be closed on session closing"""
        return self._connector_owner

    @property
    def raise_for_status(
        self,
    ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
        """Should `ClientResponse.raise_for_status()` be called for each response."""
        # Either a plain bool or an async callable invoked per-response.
        return self._raise_for_status

    @property
    def auto_decompress(self) -> bool:
        """Should the body response be automatically decompressed."""
        return self._auto_decompress

    @property
    def trust_env(self) -> bool:
        """
        Should proxies information from environment or netrc be trusted.

        Information is from HTTP_PROXY / HTTPS_PROXY environment variables
        or ~/.netrc file if present.
        """
        return self._trust_env

    @property
    def trace_configs(self) -> List[TraceConfig]:
        """A list of TraceConfig instances used for client tracing"""
        return self._trace_configs
+
    def detach(self) -> None:
        """Detach connector from session without closing the former.

        Session is switched to closed state anyway.
        """
        self._connector = None

    def __enter__(self) -> None:
        # Plain ``with`` is unsupported: a session must be entered with
        # ``async with`` so the connector can be released asynchronously.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass # pragma: no cover

    async def __aenter__(self) -> "ClientSession":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Closing here releases the connector when the session owns it.
        await self.close()
+
+
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
    """Wrap a request coroutine so it is both awaitable and usable as an
    async context manager (``await session.get(...)`` and
    ``async with session.get(...) as resp:``)."""

    __slots__ = ("_coro", "_resp")

    def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
        self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro

    # Coroutine protocol: delegate straight to the wrapped coroutine.
    def send(self, arg: None) -> "asyncio.Future[Any]":
        return self._coro.send(arg)

    def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
        return self._coro.throw(*args, **kwargs)

    def close(self) -> None:
        return self._coro.close()

    def __await__(self) -> Generator[Any, None, _RetType]:
        ret = self._coro.__await__()
        return ret

    def __iter__(self) -> Generator[Any, None, _RetType]:
        return self.__await__()

    async def __aenter__(self) -> _RetType:
        # Run the request, then enter the response's own context manager so
        # __aexit__ can release the underlying connection.
        self._resp: _RetType = await self._coro
        return await self._resp.__aenter__()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self._resp.__aexit__(exc_type, exc, tb)
+
+
# Concrete aliases of the generic wrapper for the two response types.
_RequestContextManager = _BaseRequestContextManager[ClientResponse]
_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse]
+
+
class _SessionRequestContextManager:
    """Context manager owning both a request coroutine and the throwaway
    ClientSession created for it; the session is closed on exit."""

    __slots__ = ("_coro", "_resp", "_session")

    def __init__(
        self,
        coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
        session: ClientSession,
    ) -> None:
        self._coro = coro
        self._resp: Optional[ClientResponse] = None
        self._session = session

    async def __aenter__(self) -> ClientResponse:
        try:
            response = await self._coro
        except BaseException:
            # The request never produced a response; still dispose of the
            # single-use session before propagating the error.
            await self._session.close()
            raise
        self._resp = response
        return response

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        assert self._resp is not None
        self._resp.close()
        await self._session.close()
+
+
# Module-level convenience: perform a one-shot request with a throwaway
# session. The typed stub applies only during static type checking.
if sys.version_info >= (3, 11) and TYPE_CHECKING:

    def request(
        method: str,
        url: StrOrURL,
        *,
        version: HttpVersion = http.HttpVersion11,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Unpack[_RequestOptions],
    ) -> _SessionRequestContextManager: ...

else:

    def request(
        method: str,
        url: StrOrURL,
        *,
        version: HttpVersion = http.HttpVersion11,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> _SessionRequestContextManager:
        """Constructs and sends a request.

        Returns response object.
        method - HTTP method
        url - request url
        params - (optional) Dictionary or bytes to be sent in the query
            string of the new request
        data - (optional) Dictionary, bytes, or file-like object to
            send in the body of the request
        json - (optional) Any json compatible python object
        headers - (optional) Dictionary of HTTP Headers to send with
            the request
        cookies - (optional) Dict object to send with the request
        auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
        auth - aiohttp.helpers.BasicAuth
        allow_redirects - (optional) If set to False, do not follow
            redirects
        version - Request HTTP version.
        compress - Set to True if request has to be compressed
            with deflate encoding.
        chunked - Set to chunk size for chunked transfer encoding.
        expect100 - Expect 100-continue response from server.
        connector - BaseConnector sub-class instance to support
            connection pooling.
        read_until_eof - Read response until eof if response
            does not have Content-Length header.
        loop - Optional event loop.
        timeout - Optional ClientTimeout settings structure, 5min
            total timeout by default.
        Usage::
          >>> import aiohttp
          >>> async with aiohttp.request('GET', 'http://python.org/') as resp:
          ...    print(resp)
          ...    data = await resp.read()
          <ClientResponse(https://www.python.org/) [200 OK]>
        """
        # Own the connector only when we created it here, so a
        # caller-supplied connector is not closed with the session.
        connector_owner = False
        if connector is None:
            connector_owner = True
            # force_close: no point pooling connections for a session that
            # is closed as soon as the response is consumed.
            connector = TCPConnector(loop=loop, force_close=True)

        session = ClientSession(
            loop=loop,
            cookies=kwargs.pop("cookies", None),
            version=version,
            timeout=kwargs.pop("timeout", sentinel),
            connector=connector,
            connector_owner=connector_owner,
        )

        return _SessionRequestContextManager(
            session._request(method, url, **kwargs),
            session,
        )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_exceptions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_exceptions.py"
new file mode 100644
index 0000000..1d298e9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_exceptions.py"
@@ -0,0 +1,421 @@
+"""HTTP related errors."""
+
+import asyncio
+import warnings
+from typing import TYPE_CHECKING, Optional, Tuple, Union
+
+from multidict import MultiMapping
+
+from .typedefs import StrOrURL
+
+if TYPE_CHECKING:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+else:
+ try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+ except ImportError: # pragma: no cover
+ ssl = SSLContext = None # type: ignore[assignment]
+
+if TYPE_CHECKING:
+ from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
+ from .http_parser import RawResponseMessage
+else:
+ RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None
+
# Public names re-exported for HTTP client error handling.
# NOTE(review): ``TooManyRedirects`` and ``UnixClientConnectorError`` are
# defined in this module but not listed here — confirm against upstream
# aiohttp before relying on ``from ... import *``.
__all__ = (
    "ClientError",
    "ClientConnectionError",
    "ClientConnectionResetError",
    "ClientOSError",
    "ClientConnectorError",
    "ClientProxyConnectionError",
    "ClientSSLError",
    "ClientConnectorDNSError",
    "ClientConnectorSSLError",
    "ClientConnectorCertificateError",
    "ConnectionTimeoutError",
    "SocketTimeoutError",
    "ServerConnectionError",
    "ServerTimeoutError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ClientResponseError",
    "ClientHttpProxyError",
    "WSServerHandshakeError",
    "ContentTypeError",
    "ClientPayloadError",
    "InvalidURL",
    "InvalidUrlClientError",
    "RedirectClientError",
    "NonHttpUrlClientError",
    "InvalidUrlRedirectClientError",
    "NonHttpUrlRedirectClientError",
    "WSMessageTypeError",
)
+
+
class ClientError(Exception):
    """Base class for all client errors raised by this package
    (connection, response, payload and URL errors all derive from it)."""
+
+
class ClientResponseError(ClientError):
    """Base class for exceptions that occur after getting a response.

    request_info: An instance of RequestInfo.
    history: A sequence of responses, if redirects occurred.
    status: HTTP status code.
    message: Error message.
    headers: Response headers.
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[MultiMapping[str]] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        # Prefer the modern ``status``; fall back to the deprecated
        # ``code``, then to 0 when neither was supplied.
        if status is not None:
            self.status = status
        else:
            self.status = code if code is not None else 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return (
            f"{self.status}, message={self.message!r}, "
            f"url={str(self.request_info.real_url)!r}"
        )

    def __repr__(self) -> str:
        # Only non-default fields are included in the repr.
        parts = [f"{self.request_info!r}, {self.history!r}"]
        if self.status != 0:
            parts.append(f"status={self.status!r}")
        if self.message != "":
            parts.append(f"message={self.message!r}")
        if self.headers is not None:
            parts.append(f"headers={self.headers!r}")
        return f"{type(self).__name__}({', '.join(parts)})"

    @property
    def code(self) -> int:
        """Deprecated alias for :attr:`status`."""
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value
+
+
class ContentTypeError(ClientResponseError):
    """The Content-Type found is not valid."""
+
+
class WSServerHandshakeError(ClientResponseError):
    """WebSocket server handshake error."""
+
+
class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    Carries the proxy response's status/message/headers via the
    :class:`ClientResponseError` base.
    """
+
+
class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""
    # NOTE(review): not listed in this module's __all__ — confirm whether
    # that matches upstream aiohttp before depending on star-imports.
+
+
class ClientConnectionError(ClientError):
    """Base class for client socket errors."""
+
+
class ClientConnectionResetError(ClientConnectionError, ConnectionResetError):
    """Connection reset by peer; also catchable as builtin ConnectionResetError."""
+
+
class ClientOSError(ClientConnectionError, OSError):
    """OSError error; also catchable as builtin OSError."""
+
+
class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    a connection can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        # Surface errno/strerror through the OSError base for compatibility.
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        """The original OSError that caused the connection failure."""
        return self._os_error

    @property
    def host(self) -> str:
        """Host of the failed connection attempt."""
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        """Port of the failed connection attempt."""
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
        """SSL configuration of the failed connection attempt."""
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, "default" if self.ssl is True else self.ssl, self.strerror
        )

    # OSError.__reduce__ does too much black magic; use the plain
    # BaseException pickling protocol instead so args round-trip.
    __reduce__ = BaseException.__reduce__
+
+
class ClientConnectorDNSError(ClientConnectorError):
    """DNS resolution failed during client connection.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    DNS resolution fails.
    """
+
+
class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """
+
+
class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        """Filesystem path of the unix socket that could not be reached."""
        return self._path

    def __str__(self) -> str:
        ssl_info = "default" if self.ssl is True else self.ssl
        return (
            f"Cannot connect to unix socket {self.path} "
            f"ssl:{ssl_info} [{self.strerror}]"
        )
+ )
+
+
class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""
+
+
class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        # Fall back to a generic description when no reason was supplied.
        self.message = "Server disconnected" if message is None else message
        self.args = (self.message,)
+
+
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error; also catchable as asyncio.TimeoutError."""
+
+
class ConnectionTimeoutError(ServerTimeoutError):
    """Connection timeout error."""
+
+
class SocketTimeoutError(ServerTimeoutError):
    """Socket timeout error."""
+
+
class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__} expected={self.expected!r} "
            f"got={self.got!r} host={self.host!r} port={self.port!r}>"
        )
+
+
class ClientPayloadError(ClientError):
    """Response payload error."""
+
+
class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain a host
    part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        self._url = url
        self._description = description

        # An empty description is treated the same as no description.
        if not description:
            super().__init__(url)
        else:
            super().__init__(url, description)

    @property
    def url(self) -> StrOrURL:
        """The offending URL as passed by the caller."""
        return self._url

    @property
    def description(self) -> "str | None":
        """Optional human-readable reason the URL was rejected."""
        return self._description

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self}>"

    def __str__(self) -> str:
        if not self._description:
            return str(self._url)
        return f"{self._url} - {self._description}"
+
+
class InvalidUrlClientError(InvalidURL):
    """Invalid URL client error."""
+
+
class RedirectClientError(ClientError):
    """Client redirect error."""
+
+
class NonHttpUrlClientError(ClientError):
    """Non http URL client error."""
+
+
class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError):
    """Invalid URL encountered while following a redirect."""
+
+
class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError):
    """Non http URL encountered while following a redirect."""
+
+
class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""
+
+
# Build the SSL-related exception base tuples depending on whether the
# ``ssl`` module is importable; without it, ``ValueError`` stands in for
# ``ssl.CertificateError`` so the classes below can still be defined.
if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else: # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)
+
+
# Bases are chosen at import time depending on ssl availability (above).
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
    """Response ssl error."""
+
+
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        # Unlike ClientConnectorError, base __init__ is not invoked here;
        # attributes and ``args`` are populated directly.
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        """The original certificate validation exception."""
        return self._certificate_error

    @property
    def host(self) -> str:
        """Host of the failed connection attempt."""
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        """Port of the failed connection attempt."""
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        """Whether the connection attempt used SSL."""
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
+
+
class WSMessageTypeError(TypeError):
    """WebSocket message type is not valid."""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_middleware_digest_auth.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_middleware_digest_auth.py"
new file mode 100644
index 0000000..5aab5ac
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_middleware_digest_auth.py"
@@ -0,0 +1,480 @@
+"""
+Digest authentication middleware for aiohttp client.
+
+This middleware implements HTTP Digest Authentication according to RFC 7616,
+providing a more secure alternative to Basic Authentication. It supports all
+standard hash algorithms including MD5, SHA, SHA-256, SHA-512 and their session
+variants, as well as both 'auth' and 'auth-int' quality of protection (qop) options.
+"""
+
+import hashlib
+import os
+import re
+import sys
+import time
+from typing import (
+ Callable,
+ Dict,
+ Final,
+ FrozenSet,
+ List,
+ Literal,
+ Tuple,
+ TypedDict,
+ Union,
+)
+
+from yarl import URL
+
+from . import hdrs
+from .client_exceptions import ClientError
+from .client_middlewares import ClientHandlerType
+from .client_reqrep import ClientRequest, ClientResponse
+from .payload import Payload
+
+
class DigestAuthChallenge(TypedDict, total=False):
    """Parameters parsed from a ``WWW-Authenticate: Digest`` challenge.

    All keys are optional (``total=False``); which ones appear depends on
    what the server sent.
    """

    realm: str
    nonce: str
    qop: str
    algorithm: str
    opaque: str
    domain: str
    stale: str
+
+
# Map of digest algorithm token (as sent by servers, uppercased) to the
# hashlib constructor implementing it. Both the RFC 7616 spellings
# ("SHA-256") and the common unhyphenated variants ("SHA256") are accepted;
# "-SESS" variants use the same hash but alter the A1 computation.
DigestFunctions: Dict[str, Callable[[bytes], "hashlib._Hash"]] = {
    "MD5": hashlib.md5,
    "MD5-SESS": hashlib.md5,
    "SHA": hashlib.sha1,
    "SHA-SESS": hashlib.sha1,
    "SHA256": hashlib.sha256,
    "SHA256-SESS": hashlib.sha256,
    "SHA-256": hashlib.sha256,
    "SHA-256-SESS": hashlib.sha256,
    "SHA512": hashlib.sha512,
    "SHA512-SESS": hashlib.sha512,
    "SHA-512": hashlib.sha512,
    "SHA-512-SESS": hashlib.sha512,
}


# Compile the regex pattern once at module level for performance.
#
# Matches key=value pairs in auth headers: the key is alphanumeric, the value
# is either a double-quoted string (backslash escapes allowed, may be empty)
# or an unquoted token (no spaces or commas); pairs are separated by commas
# and optional whitespace. On Python 3.11+ an atomic group ((?>\w+)) is used
# for the key to reduce regex backtracking; older interpreters do not support
# atomic groups, hence the version switch.
_HEADER_PAIRS_PATTERN = re.compile(
    r'(?:^|\s|,\s*)(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))'
    if sys.version_info < (3, 11)
    else r'(?:^|\s|,\s*)((?>\w+))\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))'
)


# RFC 7616: Challenge parameters to extract
CHALLENGE_FIELDS: Final[
    Tuple[
        Literal["realm", "nonce", "qop", "algorithm", "opaque", "domain", "stale"], ...
    ]
] = (
    "realm",
    "nonce",
    "qop",
    "algorithm",
    "opaque",
    "domain",
    "stale",
)

# Supported digest authentication algorithms
# Use a tuple of sorted keys for predictable documentation and error messages
SUPPORTED_ALGORITHMS: Final[Tuple[str, ...]] = tuple(sorted(DigestFunctions.keys()))

# RFC 7616: Fields that require quoting in the Digest auth header
# These fields must be enclosed in double quotes in the Authorization header.
# Algorithm, qop, and nc are never quoted per RFC specifications.
# This frozen set is used by the template-based header construction to
# automatically determine which fields need quotes.
QUOTED_AUTH_FIELDS: Final[FrozenSet[str]] = frozenset(
    {"username", "realm", "nonce", "uri", "response", "opaque", "cnonce"}
)
+
+
def escape_quotes(value: str) -> str:
    """Escape double quotes for HTTP header values."""
    # Rebuild the string, inserting a backslash before every double quote.
    return '\\"'.join(value.split('"'))
+
+
def unescape_quotes(value: str) -> str:
    """Unescape double quotes in HTTP header values."""
    # Inverse of escape_quotes: collapse every backslash-quote pair back
    # into a bare double quote.
    return '"'.join(value.split('\\"'))
+
+
def parse_header_pairs(header: str) -> Dict[str, str]:
    """
    Parse key-value pairs from WWW-Authenticate or similar HTTP headers.

    This function handles the complex format of WWW-Authenticate header values,
    supporting both quoted and unquoted values, proper handling of commas in
    quoted values, and whitespace variations per RFC 7616.

    Examples of supported formats:
       - key1="value1", key2=value2
       - key1 = "value1" , key2="value, with, commas"
       - key1=value1,key2="value2"
       - realm="example.com", nonce="12345", qop="auth"

    Args:
       header: The header value string to parse

    Returns:
       Dictionary mapping parameter names to their values
    """
    pairs: Dict[str, str] = {}
    for raw_key, quoted_val, unquoted_val in _HEADER_PAIRS_PATTERN.findall(header):
        name = raw_key.strip()
        if not name:
            # Regex matched but the key collapsed to nothing: skip the pair.
            continue
        # A quoted value may contain escaped quotes that need unescaping;
        # unquoted tokens are taken verbatim.
        pairs[name] = unescape_quotes(quoted_val) if quoted_val else unquoted_val
    return pairs
+
+
class DigestAuthMiddleware:
    """
    HTTP digest authentication middleware for aiohttp client.

    This middleware intercepts 401 Unauthorized responses containing a Digest
    authentication challenge, calculates the appropriate digest credentials,
    and automatically retries the request with the proper Authorization header.

    Features:
    - Handles all aspects of Digest authentication handshake automatically
    - Supports all standard hash algorithms:
      - MD5, MD5-SESS
      - SHA, SHA-SESS
      - SHA256, SHA256-SESS, SHA-256, SHA-256-SESS
      - SHA512, SHA512-SESS, SHA-512, SHA-512-SESS
    - Supports 'auth' and 'auth-int' quality of protection modes
    - Properly handles quoted strings and parameter parsing
    - Includes replay attack protection with client nonce count tracking
    - Supports preemptive authentication per RFC 7616 Section 3.6

    Standards compliance:
    - RFC 7616: HTTP Digest Access Authentication (primary reference)
    - RFC 2617: HTTP Authentication (deprecated by RFC 7616)
    - RFC 1945: Section 11.1 (username restrictions)

    Implementation notes:
    The core digest calculation is inspired by the implementation in
    https://github.com/requests/requests/blob/v2.18.4/requests/auth.py
    with added support for modern digest auth features and error handling.
    """

    def __init__(
        self,
        login: str,
        password: str,
        preemptive: bool = True,
    ) -> None:
        # Explicit None checks (rather than relying on type hints) because
        # credentials frequently come from untyped config sources.
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        # RFC 1945 section 11.1 forbids ":" in the userid since it is the
        # user:password separator.
        if ":" in login:
            raise ValueError('A ":" is not allowed in username (RFC 1945#section-11.1)')

        self._login_str: Final[str] = login
        self._login_bytes: Final[bytes] = login.encode("utf-8")
        self._password_bytes: Final[bytes] = password.encode("utf-8")

        # Replay-protection state: nonce-count restarts at 1 whenever the
        # server issues a new nonce.
        self._last_nonce_bytes = b""
        self._nonce_count = 0
        self._challenge: DigestAuthChallenge = {}
        self._preemptive: bool = preemptive
        # Set of URLs defining the protection space
        self._protection_space: List[str] = []

    async def _encode(
        self, method: str, url: URL, body: Union[Payload, Literal[b""]]
    ) -> str:
        """
        Build digest authorization header for the current challenge.

        Args:
            method: The HTTP method (GET, POST, etc.)
            url: The request URL
            body: The request body (used for qop=auth-int)

        Returns:
            A fully formatted Digest authorization header string

        Raises:
            ClientError: If the challenge is missing required parameters or
                contains unsupported values

        """
        challenge = self._challenge
        if "realm" not in challenge:
            raise ClientError(
                "Malformed Digest auth challenge: Missing 'realm' parameter"
            )

        if "nonce" not in challenge:
            raise ClientError(
                "Malformed Digest auth challenge: Missing 'nonce' parameter"
            )

        # Empty realm values are allowed per RFC 7616 (SHOULD, not MUST, contain host name)
        realm = challenge["realm"]
        nonce = challenge["nonce"]

        # Empty nonce values are not allowed as they are security-critical for replay protection
        if not nonce:
            raise ClientError(
                "Security issue: Digest auth challenge contains empty 'nonce' value"
            )

        qop_raw = challenge.get("qop", "")
        # Preserve original algorithm case for response while using uppercase for processing
        algorithm_original = challenge.get("algorithm", "MD5")
        algorithm = algorithm_original.upper()
        opaque = challenge.get("opaque", "")

        # Convert string values to bytes once
        nonce_bytes = nonce.encode("utf-8")
        realm_bytes = realm.encode("utf-8")
        path = URL(url).path_qs

        # Process QoP
        qop = ""
        qop_bytes = b""
        if qop_raw:
            # Server may offer several qop options; we support auth/auth-int.
            valid_qops = {"auth", "auth-int"}.intersection(
                {q.strip() for q in qop_raw.split(",") if q.strip()}
            )
            if not valid_qops:
                raise ClientError(
                    f"Digest auth error: Unsupported Quality of Protection (qop) value(s): {qop_raw}"
                )

            # Prefer auth-int (integrity-protected) when both are offered.
            qop = "auth-int" if "auth-int" in valid_qops else "auth"
            qop_bytes = qop.encode("utf-8")

        if algorithm not in DigestFunctions:
            raise ClientError(
                f"Digest auth error: Unsupported hash algorithm: {algorithm}. "
                f"Supported algorithms: {', '.join(SUPPORTED_ALGORITHMS)}"
            )
        hash_fn: Final = DigestFunctions[algorithm]

        def H(x: bytes) -> bytes:
            """RFC 7616 Section 3: Hash function H(data) = hex(hash(data))."""
            return hash_fn(x).hexdigest().encode()

        def KD(s: bytes, d: bytes) -> bytes:
            """RFC 7616 Section 3: KD(secret, data) = H(concat(secret, ":", data))."""
            return H(b":".join((s, d)))

        # Calculate A1 and A2
        A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes))
        A2 = f"{method.upper()}:{path}".encode()
        if qop == "auth-int":
            # auth-int additionally hashes the request body into A2.
            if isinstance(body, Payload):  # will always be empty bytes unless Payload
                entity_bytes = await body.as_bytes()  # Get bytes from Payload
            else:
                entity_bytes = body
            entity_hash = H(entity_bytes)
            A2 = b":".join((A2, entity_hash))

        HA1 = H(A1)
        HA2 = H(A2)

        # Nonce count handling: increment while the server keeps the same
        # nonce, restart at 1 on a fresh nonce (replay protection).
        if nonce_bytes == self._last_nonce_bytes:
            self._nonce_count += 1
        else:
            self._nonce_count = 1

        self._last_nonce_bytes = nonce_bytes
        ncvalue = f"{self._nonce_count:08x}"
        ncvalue_bytes = ncvalue.encode("utf-8")

        # Generate client nonce: 16 hex chars derived from the count, the
        # server nonce, the wall clock and 8 random bytes.
        cnonce = hashlib.sha1(
            b"".join(
                [
                    str(self._nonce_count).encode("utf-8"),
                    nonce_bytes,
                    time.ctime().encode("utf-8"),
                    os.urandom(8),
                ]
            )
        ).hexdigest()[:16]
        cnonce_bytes = cnonce.encode("utf-8")

        # Special handling for session-based algorithms:
        # HA1 = H(H(A1) ":" nonce ":" cnonce) per RFC 7616.
        if algorithm.upper().endswith("-SESS"):
            HA1 = H(b":".join((HA1, nonce_bytes, cnonce_bytes)))

        # Calculate the response digest
        if qop:
            noncebit = b":".join(
                (nonce_bytes, ncvalue_bytes, cnonce_bytes, qop_bytes, HA2)
            )
            response_digest = KD(HA1, noncebit)
        else:
            # Legacy RFC 2069 form when the server sent no qop.
            response_digest = KD(HA1, b":".join((nonce_bytes, HA2)))

        # Define a dict mapping of header fields to their values
        # Group fields into always-present, optional, and qop-dependent
        header_fields = {
            # Always present fields
            "username": escape_quotes(self._login_str),
            "realm": escape_quotes(realm),
            "nonce": escape_quotes(nonce),
            "uri": path,
            "response": response_digest.decode(),
            "algorithm": algorithm_original,
        }

        # Optional fields
        if opaque:
            header_fields["opaque"] = escape_quotes(opaque)

        # QoP-dependent fields
        if qop:
            header_fields["qop"] = qop
            header_fields["nc"] = ncvalue
            header_fields["cnonce"] = cnonce

        # Build header using templates for each field type:
        # QUOTED_AUTH_FIELDS get double quotes, the rest are emitted bare.
        pairs: List[str] = []
        for field, value in header_fields.items():
            if field in QUOTED_AUTH_FIELDS:
                pairs.append(f'{field}="{value}"')
            else:
                pairs.append(f"{field}={value}")

        return f"Digest {', '.join(pairs)}"

    def _in_protection_space(self, url: URL) -> bool:
        """
        Check if the given URL is within the current protection space.

        According to RFC 7616, a URI is in the protection space if any URI
        in the protection space is a prefix of it (after both have been made absolute).
        """
        request_str = str(url)
        for space_str in self._protection_space:
            # Check if request starts with space URL
            if not request_str.startswith(space_str):
                continue
            # Exact match or space ends with / (proper directory prefix)
            if len(request_str) == len(space_str) or space_str[-1] == "/":
                return True
            # Check next char is / to ensure proper path boundary
            # (prevents "/api" from matching "/apiv2").
            if request_str[len(space_str)] == "/":
                return True
        return False

    def _authenticate(self, response: ClientResponse) -> bool:
        """
        Takes the given response and tries digest-auth, if needed.

        Returns true if the original request must be resent.
        """
        if response.status != 401:
            return False

        auth_header = response.headers.get("www-authenticate", "")
        if not auth_header:
            return False  # No authentication header present

        method, sep, headers = auth_header.partition(" ")
        if not sep:
            # No space found in www-authenticate header
            return False  # Malformed auth header, missing scheme separator

        if method.lower() != "digest":
            # Not a digest auth challenge (could be Basic, Bearer, etc.)
            return False

        if not headers:
            # We have a digest scheme but no parameters
            return False  # Malformed digest header, missing parameters

        # We have a digest auth header with content
        if not (header_pairs := parse_header_pairs(headers)):
            # Failed to parse any key-value pairs
            return False  # Malformed digest header, no valid parameters

        # Extract challenge parameters (only known fields, only truthy values)
        self._challenge = {}
        for field in CHALLENGE_FIELDS:
            if value := header_pairs.get(field):
                self._challenge[field] = value

        # Update protection space based on domain parameter or default to origin
        origin = response.url.origin()

        if domain := self._challenge.get("domain"):
            # Parse space-separated list of URIs
            self._protection_space = []
            for uri in domain.split():
                # Remove quotes if present
                uri = uri.strip('"')
                if uri.startswith("/"):
                    # Path-absolute, relative to origin
                    self._protection_space.append(str(origin.join(URL(uri))))
                else:
                    # Absolute URI
                    self._protection_space.append(str(URL(uri)))
        else:
            # No domain specified, protection space is entire origin
            self._protection_space = [str(origin)]

        # Return True only if we found at least one challenge parameter
        return bool(self._challenge)

    async def __call__(
        self, request: ClientRequest, handler: ClientHandlerType
    ) -> ClientResponse:
        """Run the digest auth middleware."""
        response = None
        # At most one retry: initial attempt plus one authenticated resend.
        for retry_count in range(2):
            # Apply authorization header if:
            # 1. This is a retry after 401 (retry_count > 0), OR
            # 2. Preemptive auth is enabled AND we have a challenge AND the URL is in protection space
            if retry_count > 0 or (
                self._preemptive
                and self._challenge
                and self._in_protection_space(request.url)
            ):
                request.headers[hdrs.AUTHORIZATION] = await self._encode(
                    request.method, request.url, request.body
                )

            # Send the request
            response = await handler(request)

            # Check if we need to authenticate
            if not self._authenticate(response):
                break

        # At this point, response is guaranteed to be defined
        assert response is not None
        return response
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_middlewares.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_middlewares.py"
new file mode 100644
index 0000000..3ca2cb2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_middlewares.py"
@@ -0,0 +1,55 @@
+"""Client middleware support."""
+
+from collections.abc import Awaitable, Callable, Sequence
+
+from .client_reqrep import ClientRequest, ClientResponse
+
__all__ = ("ClientMiddlewareType", "ClientHandlerType", "build_client_middlewares")

# Type alias for client request handlers - functions that process requests and return responses
ClientHandlerType = Callable[[ClientRequest], Awaitable[ClientResponse]]

# Type for client middleware - similar to server but uses ClientRequest/ClientResponse.
# A middleware receives the request plus the next handler in the chain and
# returns the (possibly substituted) response.
ClientMiddlewareType = Callable[
    [ClientRequest, ClientHandlerType], Awaitable[ClientResponse]
]
+
+
def build_client_middlewares(
    handler: ClientHandlerType,
    middlewares: Sequence[ClientMiddlewareType],
) -> ClientHandlerType:
    """
    Apply middlewares to request handler.

    The middlewares are applied in reverse order, so the first middleware
    in the list wraps all subsequent middlewares and the handler.

    This implementation avoids using partial/update_wrapper to minimize overhead
    and doesn't cache to avoid holding references to stateful middleware.
    """
    # Fast path: exactly one middleware needs no chain-building machinery.
    if len(middlewares) == 1:
        only = middlewares[0]

        async def single_middleware_handler(req: ClientRequest) -> ClientResponse:
            return await single(req, handler) if False else await only(req, handler)

        return single_middleware_handler

    def _wrap(mw: ClientMiddlewareType, nxt: ClientHandlerType) -> ClientHandlerType:
        # Factory function so each closure captures its own (mw, nxt) pair
        # instead of the loop variables (late-binding pitfall).
        async def wrapped(req: ClientRequest) -> ClientResponse:
            return await mw(req, nxt)

        return wrapped

    # Fold from the innermost handler outwards: after the loop, the first
    # middleware in the sequence is the outermost wrapper.
    chain = handler
    for mw in reversed(middlewares):
        chain = _wrap(mw, chain)
    return chain
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_proto.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_proto.py"
new file mode 100644
index 0000000..e2fb1ce
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_proto.py"
@@ -0,0 +1,359 @@
+import asyncio
+from contextlib import suppress
+from typing import Any, Optional, Tuple, Union
+
+from .base_protocol import BaseProtocol
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientOSError,
+ ClientPayloadError,
+ ServerDisconnectedError,
+ SocketTimeoutError,
+)
+from .helpers import (
+ _EXC_SENTINEL,
+ EMPTY_BODY_STATUS_CODES,
+ BaseTimerContext,
+ set_exception,
+ set_result,
+)
+from .http import HttpResponseParser, RawResponseMessage
+from .http_exceptions import HttpProcessingError
+from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
+
+
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        # Set when the connection must not be reused (keep-alive disabled).
        self._should_close = False

        self._payload: Optional[StreamReader] = None
        self._skip_payload = False
        # Custom payload parser (websocket reader) installed via set_parser().
        self._payload_parser = None

        self._timer = None

        # Bytes received before a parser was installed; replayed later.
        self._tail = b""
        self._upgraded = False
        self._parser: Optional[HttpResponseParser] = None

        self._read_timeout: Optional[float] = None
        self._read_timeout_handle: Optional[asyncio.TimerHandle] = None

        self._timeout_ceil_threshold: Optional[float] = 5

        # Lazily-created future resolved in connection_lost(); see `closed`.
        self._closed: Union[None, asyncio.Future[None]] = None
        self._connection_lost_called = False

    @property
    def closed(self) -> Union[None, asyncio.Future[None]]:
        """Future that is set when the connection is closed.

        This property returns a Future that will be completed when the connection
        is closed. The Future is created lazily on first access to avoid creating
        futures that will never be awaited.

        Returns:
            - A Future[None] if the connection is still open or was closed after
              this property was accessed
            - None if connection_lost() was already called before this property
              was ever accessed (indicating no one is waiting for the closure)
        """
        if self._closed is None and not self._connection_lost_called:
            self._closed = self._loop.create_future()
        return self._closed

    @property
    def upgraded(self) -> bool:
        # True once an HTTP upgrade (e.g. to websocket) has been parsed.
        return self._upgraded

    @property
    def should_close(self) -> bool:
        # Any leftover state (unread payload, pending parser, buffered data)
        # makes the connection unsafe to reuse from the pool.
        return bool(
            self._should_close
            or (self._payload is not None and not self._payload.is_eof())
            or self._upgraded
            or self._exception is not None
            or self._payload_parser is not None
            or self._buffer
            or self._tail
        )

    def force_close(self) -> None:
        self._should_close = True

    def close(self) -> None:
        self._exception = None  # Break cyclic references
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
        self._drop_timeout()

    def abort(self) -> None:
        self._exception = None  # Break cyclic references
        transport = self.transport
        if transport is not None:
            transport.abort()
            self.transport = None
            self._payload = None
        self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._connection_lost_called = True
        self._drop_timeout()

        original_connection_error = exc
        reraised_exc = original_connection_error

        connection_closed_cleanly = original_connection_error is None

        if self._closed is not None:
            # If someone is waiting for the closed future,
            # we should set it to None or an exception. If
            # self._closed is None, it means that
            # connection_lost() was called already
            # or nobody is waiting for it.
            if connection_closed_cleanly:
                set_result(self._closed, None)
            else:
                assert original_connection_error is not None
                set_exception(
                    self._closed,
                    ClientConnectionError(
                        f"Connection lost: {original_connection_error !s}",
                    ),
                    original_connection_error,
                )

        if self._payload_parser is not None:
            with suppress(Exception):  # FIXME: log this somehow?
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception as underlying_exc:
                if self._payload is not None:
                    client_payload_exc_msg = (
                        f"Response payload is not completed: {underlying_exc !r}"
                    )
                    if not connection_closed_cleanly:
                        client_payload_exc_msg = (
                            f"{client_payload_exc_msg !s}. "
                            f"{original_connection_error !r}"
                        )
                    set_exception(
                        self._payload,
                        ClientPayloadError(client_payload_exc_msg),
                        underlying_exc,
                    )

        if not self.is_eof():
            if isinstance(original_connection_error, OSError):
                reraised_exc = ClientOSError(*original_connection_error.args)
            if connection_closed_cleanly:
                reraised_exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            underlying_non_eof_exc = (
                _EXC_SENTINEL
                if connection_closed_cleanly
                else original_connection_error
            )
            assert underlying_non_eof_exc is not None
            assert reraised_exc is not None
            self.set_exception(reraised_exc, underlying_non_eof_exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(reraised_exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        super().pause_reading()
        # No data can arrive while paused, so the read timeout is suspended.
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc, exc_cause)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        # parser: WebSocketReader
        # payload: WebSocketDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        # Replay bytes that arrived before this parser was installed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2**16,
        timeout_ceil_threshold: float = 5,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
    ) -> None:
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout

        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        )

        # Replay bytes that arrived before the parser existed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def start_timeout(self) -> None:
        self._reschedule_timeout()

    @property
    def read_timeout(self) -> Optional[float]:
        return self._read_timeout

    @read_timeout.setter
    def read_timeout(self, read_timeout: Optional[float]) -> None:
        self._read_timeout = read_timeout

    def _on_read_timeout(self) -> None:
        exc = SocketTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            set_exception(self._payload, exc)

    def data_received(self, data: bytes) -> None:
        # Every chunk of data restarts the read-timeout clock.
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser - currently always WebSocketReader
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return

        if self._upgraded or self._parser is None:
            # i.e. websocket connection, websocket parser is not set yet
            self._tail += data
            return

        # parse http messages
        try:
            messages, upgraded, tail = self._parser.feed_data(data)
        except BaseException as underlying_exc:
            if self.transport is not None:
                # connection.release() could be called BEFORE
                # data_received(), the transport is already
                # closed in this case
                self.transport.close()
            # should_close is True after the call
            if isinstance(underlying_exc, HttpProcessingError):
                exc = HttpProcessingError(
                    code=underlying_exc.code,
                    message=underlying_exc.message,
                    headers=underlying_exc.headers,
                )
            else:
                exc = HttpProcessingError()
            self.set_exception(exc, underlying_exc)
            return

        self._upgraded = upgraded

        payload: Optional[StreamReader] = None
        for message, payload in messages:
            if message.should_close:
                self._should_close = True

            self._payload = payload

            if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES:
                self.feed_data((message, EMPTY_PAYLOAD), 0)
            else:
                self.feed_data((message, payload), 0)

        if payload is not None:
            # new message(s) was processed
            # register timeout handler unsubscribing
            # either on end-of-stream or immediately for
            # EMPTY_PAYLOAD
            if payload is not EMPTY_PAYLOAD:
                payload.on_eof(self._drop_timeout)
            else:
                self._drop_timeout()

        if upgraded and tail:
            self.data_received(tail)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_reqrep.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_reqrep.py"
new file mode 100644
index 0000000..a9e0795
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_reqrep.py"
@@ -0,0 +1,1536 @@
+import asyncio
+import codecs
+import contextlib
+import functools
+import io
+import re
+import sys
+import traceback
+import warnings
+from collections.abc import Mapping
+from hashlib import md5, sha1, sha256
+from http.cookies import Morsel, SimpleCookie
+from types import MappingProxyType, TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ List,
+ Literal,
+ NamedTuple,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+)
+
+import attr
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+from yarl import URL
+
+from . import hdrs, helpers, http, multipart, payload
+from ._cookie_helpers import (
+ parse_cookie_header,
+ parse_set_cookie_headers,
+ preserve_morsel_with_coded_value,
+)
+from .abc import AbstractStreamWriter
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientOSError,
+ ClientResponseError,
+ ContentTypeError,
+ InvalidURL,
+ ServerFingerprintMismatch,
+)
+from .compression_utils import HAS_BROTLI, HAS_ZSTD
+from .formdata import FormData
+from .helpers import (
+ _SENTINEL,
+ BaseTimerContext,
+ BasicAuth,
+ HeadersMixin,
+ TimerNoop,
+ noop,
+ reify,
+ sentinel,
+ set_exception,
+ set_result,
+)
+from .http import (
+ SERVER_SOFTWARE,
+ HttpVersion,
+ HttpVersion10,
+ HttpVersion11,
+ StreamWriter,
+)
+from .streams import StreamReader
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ JSONDecoder,
+ LooseCookies,
+ LooseHeaders,
+ Query,
+ RawHeaders,
+)
+
+if TYPE_CHECKING:
+ import ssl
+ from ssl import SSLContext
+else:
+ try:
+ import ssl
+ from ssl import SSLContext
+ except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
+
+
+if TYPE_CHECKING:
+ from .client import ClientSession
+ from .connector import Connection
+ from .tracing import Trace
+
+
+_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
+
+
+def _gen_default_accept_encoding() -> str:
+ encodings = [
+ "gzip",
+ "deflate",
+ ]
+ if HAS_BROTLI:
+ encodings.append("br")
+ if HAS_ZSTD:
+ encodings.append("zstd")
+ return ", ".join(encodings)
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ContentDisposition:
+ type: Optional[str]
+ parameters: "MappingProxyType[str, str]"
+ filename: Optional[str]
+
+
+class _RequestInfo(NamedTuple):
+ url: URL
+ method: str
+ headers: "CIMultiDictProxy[str]"
+ real_url: URL
+
+
+class RequestInfo(_RequestInfo):
+
+ def __new__(
+ cls,
+ url: URL,
+ method: str,
+ headers: "CIMultiDictProxy[str]",
+ real_url: Union[URL, _SENTINEL] = sentinel,
+ ) -> "RequestInfo":
+ """Create a new RequestInfo instance.
+
+ For backwards compatibility, the real_url parameter is optional.
+ """
+ return tuple.__new__(
+ cls, (url, method, headers, url if real_url is sentinel else real_url)
+ )
+
+
+class Fingerprint:
+ HASHFUNC_BY_DIGESTLEN = {
+ 16: md5,
+ 20: sha1,
+ 32: sha256,
+ }
+
+ def __init__(self, fingerprint: bytes) -> None:
+ digestlen = len(fingerprint)
+ hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen)
+ if not hashfunc:
+ raise ValueError("fingerprint has invalid length")
+ elif hashfunc is md5 or hashfunc is sha1:
+ raise ValueError("md5 and sha1 are insecure and not supported. Use sha256.")
+ self._hashfunc = hashfunc
+ self._fingerprint = fingerprint
+
+ @property
+ def fingerprint(self) -> bytes:
+ return self._fingerprint
+
+ def check(self, transport: asyncio.Transport) -> None:
+ if not transport.get_extra_info("sslcontext"):
+ return
+ sslobj = transport.get_extra_info("ssl_object")
+ cert = sslobj.getpeercert(binary_form=True)
+ got = self._hashfunc(cert).digest()
+ if got != self._fingerprint:
+ host, port, *_ = transport.get_extra_info("peername")
+ raise ServerFingerprintMismatch(self._fingerprint, got, host, port)
+
+
+if ssl is not None:
+ SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
+else: # pragma: no cover
+ SSL_ALLOWED_TYPES = (bool, type(None))
+
+
+def _merge_ssl_params(
+ ssl: Union["SSLContext", bool, Fingerprint],
+ verify_ssl: Optional[bool],
+ ssl_context: Optional["SSLContext"],
+ fingerprint: Optional[bytes],
+) -> Union["SSLContext", bool, Fingerprint]:
+ if ssl is None:
+ ssl = True # Double check for backwards compatibility
+ if verify_ssl is not None and not verify_ssl:
+ warnings.warn(
+ "verify_ssl is deprecated, use ssl=False instead",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ if ssl is not True:
+ raise ValueError(
+ "verify_ssl, ssl_context, fingerprint and ssl "
+ "parameters are mutually exclusive"
+ )
+ else:
+ ssl = False
+ if ssl_context is not None:
+ warnings.warn(
+ "ssl_context is deprecated, use ssl=context instead",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ if ssl is not True:
+ raise ValueError(
+ "verify_ssl, ssl_context, fingerprint and ssl "
+ "parameters are mutually exclusive"
+ )
+ else:
+ ssl = ssl_context
+ if fingerprint is not None:
+ warnings.warn(
+ "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ if ssl is not True:
+ raise ValueError(
+ "verify_ssl, ssl_context, fingerprint and ssl "
+ "parameters are mutually exclusive"
+ )
+ else:
+ ssl = Fingerprint(fingerprint)
+ if not isinstance(ssl, SSL_ALLOWED_TYPES):
+ raise TypeError(
+ "ssl should be SSLContext, bool, Fingerprint or None, "
+ "got {!r} instead.".format(ssl)
+ )
+ return ssl
+
+
+_SSL_SCHEMES = frozenset(("https", "wss"))
+
+
+# ConnectionKey is a NamedTuple because it is used as a key in a dict
+# and a set in the connector. Since a NamedTuple is a tuple it uses
+# the fast native tuple __hash__ and __eq__ implementation in CPython.
+class ConnectionKey(NamedTuple):
+ # the key should contain an information about used proxy / TLS
+ # to prevent reusing wrong connections from a pool
+ host: str
+ port: Optional[int]
+ is_ssl: bool
+ ssl: Union[SSLContext, bool, Fingerprint]
+ proxy: Optional[URL]
+ proxy_auth: Optional[BasicAuth]
+ proxy_headers_hash: Optional[int] # hash(CIMultiDict)
+
+
+def _is_expected_content_type(
+ response_content_type: str, expected_content_type: str
+) -> bool:
+ if expected_content_type == "application/json":
+ return json_re.match(response_content_type) is not None
+ return expected_content_type in response_content_type
+
+
+def _warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) -> None:
+ """Warn if the payload is not closed.
+
+ Callers must check that the body is a Payload before calling this method.
+
+ Args:
+ payload: The payload to check
+ stacklevel: Stack level for the warning (default 2 for direct callers)
+ """
+ if not payload.autoclose and not payload.consumed:
+ warnings.warn(
+ "The previous request body contains unclosed resources. "
+ "Use await request.update_body() instead of setting request.body "
+ "directly to properly close resources and avoid leaks.",
+ ResourceWarning,
+ stacklevel=stacklevel,
+ )
+
+
+class ClientResponse(HeadersMixin):
+
+ # Some of these attributes are None when created,
+ # but will be set by the start() method.
+ # As the end user will likely never see the None values, we cheat the types below.
+ # from the Status-Line of the response
+ version: Optional[HttpVersion] = None # HTTP-Version
+ status: int = None # type: ignore[assignment] # Status-Code
+ reason: Optional[str] = None # Reason-Phrase
+
+ content: StreamReader = None # type: ignore[assignment] # Payload stream
+ _body: Optional[bytes] = None
+ _headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
+ _history: Tuple["ClientResponse", ...] = ()
+ _raw_headers: RawHeaders = None # type: ignore[assignment]
+
+ _connection: Optional["Connection"] = None # current connection
+ _cookies: Optional[SimpleCookie] = None
+ _raw_cookie_headers: Optional[Tuple[str, ...]] = None
+ _continue: Optional["asyncio.Future[bool]"] = None
+ _source_traceback: Optional[traceback.StackSummary] = None
+ _session: Optional["ClientSession"] = None
+ # set up by ClientRequest after ClientResponse object creation
+ # post-init stage allows to not change ctor signature
+ _closed = True # to allow __del__ for non-initialized properly response
+ _released = False
+ _in_context = False
+
+ _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"
+
+ __writer: Optional["asyncio.Task[None]"] = None
+
+ def __init__(
+ self,
+ method: str,
+ url: URL,
+ *,
+ writer: "Optional[asyncio.Task[None]]",
+ continue100: Optional["asyncio.Future[bool]"],
+ timer: BaseTimerContext,
+ request_info: RequestInfo,
+ traces: List["Trace"],
+ loop: asyncio.AbstractEventLoop,
+ session: "ClientSession",
+ ) -> None:
+ # URL forbids subclasses, so a simple type check is enough.
+ assert type(url) is URL
+
+ self.method = method
+
+ self._real_url = url
+ self._url = url.with_fragment(None) if url.raw_fragment else url
+ if writer is not None:
+ self._writer = writer
+ if continue100 is not None:
+ self._continue = continue100
+ self._request_info = request_info
+ self._timer = timer if timer is not None else TimerNoop()
+ self._cache: Dict[str, Any] = {}
+ self._traces = traces
+ self._loop = loop
+ # Save reference to _resolve_charset, so that get_encoding() will still
+ # work after the response has finished reading the body.
+ # TODO: Fix session=None in tests (see ClientRequest.__init__).
+ if session is not None:
+ # store a reference to session #1985
+ self._session = session
+ self._resolve_charset = session._resolve_charset
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ def __reset_writer(self, _: object = None) -> None:
+ self.__writer = None
+
+ @property
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
+ """The writer task for streaming data.
+
+ _writer is only provided for backwards compatibility
+ for subclasses that may need to access it.
+ """
+ return self.__writer
+
+ @_writer.setter
+ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
+ """Set the writer task for streaming data."""
+ if self.__writer is not None:
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = writer
+ if writer is None:
+ return
+ if writer.done():
+ # The writer is already done, so we can clear it immediately.
+ self.__writer = None
+ else:
+ writer.add_done_callback(self.__reset_writer)
+
+ @property
+ def cookies(self) -> SimpleCookie:
+ if self._cookies is None:
+ if self._raw_cookie_headers is not None:
+ # Parse cookies for response.cookies (SimpleCookie for backward compatibility)
+ cookies = SimpleCookie()
+ # Use parse_set_cookie_headers for more lenient parsing that handles
+ # malformed cookies better than SimpleCookie.load
+ cookies.update(parse_set_cookie_headers(self._raw_cookie_headers))
+ self._cookies = cookies
+ else:
+ self._cookies = SimpleCookie()
+ return self._cookies
+
+ @cookies.setter
+ def cookies(self, cookies: SimpleCookie) -> None:
+ self._cookies = cookies
+ # Generate raw cookie headers from the SimpleCookie
+ if cookies:
+ self._raw_cookie_headers = tuple(
+ morsel.OutputString() for morsel in cookies.values()
+ )
+ else:
+ self._raw_cookie_headers = None
+
+ @reify
+ def url(self) -> URL:
+ return self._url
+
+ @reify
+ def url_obj(self) -> URL:
+ warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
+ return self._url
+
+ @reify
+ def real_url(self) -> URL:
+ return self._real_url
+
+ @reify
+ def host(self) -> str:
+ assert self._url.host is not None
+ return self._url.host
+
+ @reify
+ def headers(self) -> "CIMultiDictProxy[str]":
+ return self._headers
+
+ @reify
+ def raw_headers(self) -> RawHeaders:
+ return self._raw_headers
+
+ @reify
+ def request_info(self) -> RequestInfo:
+ return self._request_info
+
+ @reify
+ def content_disposition(self) -> Optional[ContentDisposition]:
+ raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
+ if raw is None:
+ return None
+ disposition_type, params_dct = multipart.parse_content_disposition(raw)
+ params = MappingProxyType(params_dct)
+ filename = multipart.content_disposition_filename(params)
+ return ContentDisposition(disposition_type, params, filename)
+
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._closed:
+ return
+
+ if self._connection is not None:
+ self._connection.release()
+ self._cleanup_writer()
+
+ if self._loop.get_debug():
+ kwargs = {"source": self}
+ _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
+ context = {"client_response": self, "message": "Unclosed response"}
+ if self._source_traceback:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
+
+ def __repr__(self) -> str:
+ out = io.StringIO()
+ ascii_encodable_url = str(self.url)
+ if self.reason:
+ ascii_encodable_reason = self.reason.encode(
+ "ascii", "backslashreplace"
+ ).decode("ascii")
+ else:
+ ascii_encodable_reason = "None"
+ print(
+ "<ClientResponse({}) [{} {}]>".format(
+ ascii_encodable_url, self.status, ascii_encodable_reason
+ ),
+ file=out,
+ )
+ print(self.headers, file=out)
+ return out.getvalue()
+
+ @property
+ def connection(self) -> Optional["Connection"]:
+ return self._connection
+
+ @reify
+ def history(self) -> Tuple["ClientResponse", ...]:
+ """A sequence of of responses, if redirects occurred."""
+ return self._history
+
+ @reify
+ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
+ links_str = ", ".join(self.headers.getall("link", []))
+
+ if not links_str:
+ return MultiDictProxy(MultiDict())
+
+ links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
+
+ for val in re.split(r",(?=\s*<)", links_str):
+ match = re.match(r"\s*<(.*)>(.*)", val)
+ if match is None: # pragma: no cover
+ # the check exists to suppress mypy error
+ continue
+ url, params_str = match.groups()
+ params = params_str.split(";")[1:]
+
+ link: MultiDict[Union[str, URL]] = MultiDict()
+
+ for param in params:
+ match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
+ if match is None: # pragma: no cover
+ # the check exists to suppress mypy error
+ continue
+ key, _, value, _ = match.groups()
+
+ link.add(key, value)
+
+ key = link.get("rel", url)
+
+ link.add("url", self.url.join(URL(url)))
+
+ links.add(str(key), MultiDictProxy(link))
+
+ return MultiDictProxy(links)
+
+ async def start(self, connection: "Connection") -> "ClientResponse":
+ """Start response processing."""
+ self._closed = False
+ self._protocol = connection.protocol
+ self._connection = connection
+
+ with self._timer:
+ while True:
+ # read response
+ try:
+ protocol = self._protocol
+ message, payload = await protocol.read() # type: ignore[union-attr]
+ except http.HttpProcessingError as exc:
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=exc.code,
+ message=exc.message,
+ headers=exc.headers,
+ ) from exc
+
+ if message.code < 100 or message.code > 199 or message.code == 101:
+ break
+
+ if self._continue is not None:
+ set_result(self._continue, True)
+ self._continue = None
+
+ # payload eof handler
+ payload.on_eof(self._response_eof)
+
+ # response status
+ self.version = message.version
+ self.status = message.code
+ self.reason = message.reason
+
+ # headers
+ self._headers = message.headers # type is CIMultiDictProxy
+ self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
+
+ # payload
+ self.content = payload
+
+ # cookies
+ if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
+ # Store raw cookie headers for CookieJar
+ self._raw_cookie_headers = tuple(cookie_hdrs)
+ return self
+
+ def _response_eof(self) -> None:
+ if self._closed:
+ return
+
+ # protocol could be None because connection could be detached
+ protocol = self._connection and self._connection.protocol
+ if protocol is not None and protocol.upgraded:
+ return
+
+ self._closed = True
+ self._cleanup_writer()
+ self._release_connection()
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ def close(self) -> None:
+ if not self._released:
+ self._notify_content()
+
+ self._closed = True
+ if self._loop is None or self._loop.is_closed():
+ return
+
+ self._cleanup_writer()
+ if self._connection is not None:
+ self._connection.close()
+ self._connection = None
+
+ def release(self) -> Any:
+ if not self._released:
+ self._notify_content()
+
+ self._closed = True
+
+ self._cleanup_writer()
+ self._release_connection()
+ return noop()
+
+ @property
+ def ok(self) -> bool:
+ """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
+
+ This is **not** a check for ``200 OK`` but a check that the response
+ status is under 400.
+ """
+ return 400 > self.status
+
+ def raise_for_status(self) -> None:
+ if not self.ok:
+ # reason should always be not None for a started response
+ assert self.reason is not None
+
+ # If we're in a context we can rely on __aexit__() to release as the
+ # exception propagates.
+ if not self._in_context:
+ self.release()
+
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=self.status,
+ message=self.reason,
+ headers=self.headers,
+ )
+
+ def _release_connection(self) -> None:
+ if self._connection is not None:
+ if self.__writer is None:
+ self._connection.release()
+ self._connection = None
+ else:
+ self.__writer.add_done_callback(lambda f: self._release_connection())
+
+ async def _wait_released(self) -> None:
+ if self.__writer is not None:
+ try:
+ await self.__writer
+ except asyncio.CancelledError:
+ if (
+ sys.version_info >= (3, 11)
+ and (task := asyncio.current_task())
+ and task.cancelling()
+ ):
+ raise
+ self._release_connection()
+
+ def _cleanup_writer(self) -> None:
+ if self.__writer is not None:
+ self.__writer.cancel()
+ self._session = None
+
+ def _notify_content(self) -> None:
+ content = self.content
+ if content and content.exception() is None:
+ set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
+ self._released = True
+
+ async def wait_for_close(self) -> None:
+ if self.__writer is not None:
+ try:
+ await self.__writer
+ except asyncio.CancelledError:
+ if (
+ sys.version_info >= (3, 11)
+ and (task := asyncio.current_task())
+ and task.cancelling()
+ ):
+ raise
+ self.release()
+
+ async def read(self) -> bytes:
+ """Read response payload."""
+ if self._body is None:
+ try:
+ self._body = await self.content.read()
+ for trace in self._traces:
+ await trace.send_response_chunk_received(
+ self.method, self.url, self._body
+ )
+ except BaseException:
+ self.close()
+ raise
+ elif self._released: # Response explicitly released
+ raise ClientConnectionError("Connection closed")
+
+ protocol = self._connection and self._connection.protocol
+ if protocol is None or not protocol.upgraded:
+ await self._wait_released() # Underlying connection released
+ return self._body
+
+ def get_encoding(self) -> str:
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
+ mimetype = helpers.parse_mimetype(ctype)
+
+ encoding = mimetype.parameters.get("charset")
+ if encoding:
+ with contextlib.suppress(LookupError, ValueError):
+ return codecs.lookup(encoding).name
+
+ if mimetype.type == "application" and (
+ mimetype.subtype == "json" or mimetype.subtype == "rdap"
+ ):
+ # RFC 7159 states that the default encoding is UTF-8.
+ # RFC 7483 defines application/rdap+json
+ return "utf-8"
+
+ if self._body is None:
+ raise RuntimeError(
+ "Cannot compute fallback encoding of a not yet read body"
+ )
+
+ return self._resolve_charset(self, self._body)
+
+ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
+ """Read response payload and decode."""
+ if self._body is None:
+ await self.read()
+
+ if encoding is None:
+ encoding = self.get_encoding()
+
+ return self._body.decode(encoding, errors=errors) # type: ignore[union-attr]
+
+ async def json(
+ self,
+ *,
+ encoding: Optional[str] = None,
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
+ content_type: Optional[str] = "application/json",
+ ) -> Any:
+ """Read and decodes JSON response."""
+ if self._body is None:
+ await self.read()
+
+ if content_type:
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
+ if not _is_expected_content_type(ctype, content_type):
+ raise ContentTypeError(
+ self.request_info,
+ self.history,
+ status=self.status,
+ message=(
+ "Attempt to decode JSON with unexpected mimetype: %s" % ctype
+ ),
+ headers=self.headers,
+ )
+
+ stripped = self._body.strip() # type: ignore[union-attr]
+ if not stripped:
+ return None
+
+ if encoding is None:
+ encoding = self.get_encoding()
+
+ return loads(stripped.decode(encoding))
+
+ async def __aenter__(self) -> "ClientResponse":
+ self._in_context = True
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ self._in_context = False
+ # similar to _RequestContextManager, we do not need to check
+ # for exceptions, response object can close connection
+ # if state is broken
+ self.release()
+ await self.wait_for_close()
+
+
+class ClientRequest:
+ GET_METHODS = {
+ hdrs.METH_GET,
+ hdrs.METH_HEAD,
+ hdrs.METH_OPTIONS,
+ hdrs.METH_TRACE,
+ }
+ POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
+ ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
+
+ DEFAULT_HEADERS = {
+ hdrs.ACCEPT: "*/*",
+ hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
+ }
+
+ # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
+ _body: Union[None, payload.Payload] = None
+ auth = None
+ response = None
+
+ __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data
+
+ # These class defaults help create_autospec() work correctly.
+ # If autospec is improved in future, maybe these can be removed.
+ url = URL()
+ method = "GET"
+
+ _continue = None # waiter future for '100 Continue' response
+
+ _skip_auto_headers: Optional["CIMultiDict[None]"] = None
+
+ # N.B.
+ # Adding __del__ method with self._writer closing doesn't make sense
+ # because _writer is instance method, thus it keeps a reference to self.
+ # Until writer has finished finalizer will not be called.
+
+ def __init__(
+ self,
+ method: str,
+ url: URL,
+ *,
+ params: Query = None,
+ headers: Optional[LooseHeaders] = None,
+ skip_auto_headers: Optional[Iterable[str]] = None,
+ data: Any = None,
+ cookies: Optional[LooseCookies] = None,
+ auth: Optional[BasicAuth] = None,
+ version: http.HttpVersion = http.HttpVersion11,
+ compress: Union[str, bool, None] = None,
+ chunked: Optional[bool] = None,
+ expect100: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ response_class: Optional[Type["ClientResponse"]] = None,
+ proxy: Optional[URL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ timer: Optional[BaseTimerContext] = None,
+ session: Optional["ClientSession"] = None,
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
+ proxy_headers: Optional[LooseHeaders] = None,
+ traces: Optional[List["Trace"]] = None,
+ trust_env: bool = False,
+ server_hostname: Optional[str] = None,
+ ):
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
+ raise ValueError(
+ f"Method cannot contain non-token characters {method!r} "
+ f"(found at least {match.group()!r})"
+ )
+ # URL forbids subclasses, so a simple type check is enough.
+ assert type(url) is URL, url
+ if proxy is not None:
+ assert type(proxy) is URL, proxy
+ # FIXME: session is None in tests only, need to fix tests
+ # assert session is not None
+ if TYPE_CHECKING:
+ assert session is not None
+ self._session = session
+ if params:
+ url = url.extend_query(params)
+ self.original_url = url
+ self.url = url.with_fragment(None) if url.raw_fragment else url
+ self.method = method.upper()
+ self.chunked = chunked
+ self.compress = compress
+ self.loop = loop
+ self.length = None
+ if response_class is None:
+ real_response_class = ClientResponse
+ else:
+ real_response_class = response_class
+ self.response_class: Type[ClientResponse] = real_response_class
+ self._timer = timer if timer is not None else TimerNoop()
+ self._ssl = ssl if ssl is not None else True
+ self.server_hostname = server_hostname
+
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ self.update_version(version)
+ self.update_host(url)
+ self.update_headers(headers)
+ self.update_auto_headers(skip_auto_headers)
+ self.update_cookies(cookies)
+ self.update_content_encoding(data)
+ self.update_auth(auth, trust_env)
+ self.update_proxy(proxy, proxy_auth, proxy_headers)
+
+ self.update_body_from_data(data)
+ if data is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
+ self.update_expect_continue(expect100)
+ self._traces = [] if traces is None else traces
+
+ def __reset_writer(self, _: object = None) -> None:
+ self.__writer = None
+
+ def _get_content_length(self) -> Optional[int]:
+ """Extract and validate Content-Length header value.
+
+ Returns parsed Content-Length value or None if not set.
+ Raises ValueError if header exists but cannot be parsed as an integer.
+ """
+ if hdrs.CONTENT_LENGTH not in self.headers:
+ return None
+
+ content_length_hdr = self.headers[hdrs.CONTENT_LENGTH]
+ try:
+ return int(content_length_hdr)
+ except ValueError:
+ raise ValueError(
+ f"Invalid Content-Length header: {content_length_hdr}"
+ ) from None
+
+ @property
+ def skip_auto_headers(self) -> CIMultiDict[None]:
+ return self._skip_auto_headers or CIMultiDict()
+
+ @property
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
+ return self.__writer
+
+ @_writer.setter
+ def _writer(self, writer: "asyncio.Task[None]") -> None:
+ if self.__writer is not None:
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = writer
+ writer.add_done_callback(self.__reset_writer)
+
+ def is_ssl(self) -> bool:
+ return self.url.scheme in _SSL_SCHEMES
+
+ @property
+ def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
+ return self._ssl
+
+ @property
+ def connection_key(self) -> ConnectionKey:
+ if proxy_headers := self.proxy_headers:
+ h: Optional[int] = hash(tuple(proxy_headers.items()))
+ else:
+ h = None
+ url = self.url
+ return tuple.__new__(
+ ConnectionKey,
+ (
+ url.raw_host or "",
+ url.port,
+ url.scheme in _SSL_SCHEMES,
+ self._ssl,
+ self.proxy,
+ self.proxy_auth,
+ h,
+ ),
+ )
+
+ @property
+ def host(self) -> str:
+ ret = self.url.raw_host
+ assert ret is not None
+ return ret
+
+ @property
+ def port(self) -> Optional[int]:
+ return self.url.port
+
+ @property
+ def body(self) -> Union[payload.Payload, Literal[b""]]:
+ """Request body."""
+ # empty body is represented as bytes for backwards compatibility
+ return self._body or b""
+
+ @body.setter
+ def body(self, value: Any) -> None:
+ """Set request body with warning for non-autoclose payloads.
+
+ WARNING: This setter must be called from within an event loop and is not
+ thread-safe. Setting body outside of an event loop may raise RuntimeError
+ when closing file-based payloads.
+
+ DEPRECATED: Direct assignment to body is deprecated and will be removed
+ in a future version. Use await update_body() instead for proper resource
+ management.
+ """
+ # Close existing payload if present
+ if self._body is not None:
+ # Warn if the payload needs manual closing
+ # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload
+ _warn_if_unclosed_payload(self._body, stacklevel=3)
+ # NOTE: In the future, when we remove sync close support,
+ # this setter will need to be removed and only the async
+ # update_body() method will be available. For now, we call
+ # _close() for backwards compatibility.
+ self._body._close()
+ self._update_body(value)
+
+ @property
+ def request_info(self) -> RequestInfo:
+ headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
+ # These are created on every request, so we use a NamedTuple
+ # for performance reasons. We don't use the RequestInfo.__new__
+ # method because it has a different signature which is provided
+ # for backwards compatibility only.
+ return tuple.__new__(
+ RequestInfo, (self.url, self.method, headers, self.original_url)
+ )
+
+ @property
+ def session(self) -> "ClientSession":
+ """Return the ClientSession instance.
+
+ This property provides access to the ClientSession that initiated
+ this request, allowing middleware to make additional requests
+ using the same session.
+ """
+ return self._session
+
+ def update_host(self, url: URL) -> None:
+ """Update destination host, port and connection type (ssl)."""
+ # get host/port
+ if not url.raw_host:
+ raise InvalidURL(url)
+
+ # basic auth info
+ if url.raw_user or url.raw_password:
+ self.auth = helpers.BasicAuth(url.user or "", url.password or "")
+
+ def update_version(self, version: Union[http.HttpVersion, str]) -> None:
+ """Convert request version to two elements tuple.
+
+ parser HTTP version '1.1' => (1, 1)
+ """
+ if isinstance(version, str):
+ v = [part.strip() for part in version.split(".", 1)]
+ try:
+ version = http.HttpVersion(int(v[0]), int(v[1]))
+ except ValueError:
+ raise ValueError(
+ f"Can not parse http version number: {version}"
+ ) from None
+ self.version = version
+
+ def update_headers(self, headers: Optional[LooseHeaders]) -> None:
+ """Update request headers."""
+ self.headers: CIMultiDict[str] = CIMultiDict()
+
+ # Build the host header
+ host = self.url.host_port_subcomponent
+
+ # host_port_subcomponent is None when the URL is a relative URL.
+ # but we know we do not have a relative URL here.
+ assert host is not None
+ self.headers[hdrs.HOST] = host
+
+ if not headers:
+ return
+
+ if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
+ headers = headers.items()
+
+ for key, value in headers: # type: ignore[misc]
+ # A special case for Host header
+ if key in hdrs.HOST_ALL:
+ self.headers[key] = value
+ else:
+ self.headers.add(key, value)
+
+ def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
+ if skip_auto_headers is not None:
+ self._skip_auto_headers = CIMultiDict(
+ (hdr, None) for hdr in sorted(skip_auto_headers)
+ )
+ used_headers = self.headers.copy()
+ used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type]
+ else:
+ # Fast path when there are no headers to skip
+ # which is the most common case.
+ used_headers = self.headers
+
+ for hdr, val in self.DEFAULT_HEADERS.items():
+ if hdr not in used_headers:
+ self.headers[hdr] = val
+
+ if hdrs.USER_AGENT not in used_headers:
+ self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
+
+ def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
+ """Update request cookies header."""
+ if not cookies:
+ return
+
+ c = SimpleCookie()
+ if hdrs.COOKIE in self.headers:
+ # parse_cookie_header for RFC 6265 compliant Cookie header parsing
+ c.update(parse_cookie_header(self.headers.get(hdrs.COOKIE, "")))
+ del self.headers[hdrs.COOKIE]
+
+ if isinstance(cookies, Mapping):
+ iter_cookies = cookies.items()
+ else:
+ iter_cookies = cookies # type: ignore[assignment]
+ for name, value in iter_cookies:
+ if isinstance(value, Morsel):
+ # Use helper to preserve coded_value exactly as sent by server
+ c[name] = preserve_morsel_with_coded_value(value)
+ else:
+ c[name] = value # type: ignore[assignment]
+
+ self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
+
+ def update_content_encoding(self, data: Any) -> None:
+ """Set request content encoding."""
+ if not data:
+ # Don't compress an empty body.
+ self.compress = None
+ return
+
+ if self.headers.get(hdrs.CONTENT_ENCODING):
+ if self.compress:
+ raise ValueError(
+ "compress can not be set if Content-Encoding header is set"
+ )
+ elif self.compress:
+ if not isinstance(self.compress, str):
+ self.compress = "deflate"
+ self.headers[hdrs.CONTENT_ENCODING] = self.compress
+ self.chunked = True # enable chunked, no need to deal with length
+
+ def update_transfer_encoding(self) -> None:
+ """Analyze transfer-encoding header."""
+ te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+
+ if "chunked" in te:
+ if self.chunked:
+ raise ValueError(
+ "chunked can not be set "
+ 'if "Transfer-Encoding: chunked" header is set'
+ )
+
+ elif self.chunked:
+ if hdrs.CONTENT_LENGTH in self.headers:
+ raise ValueError(
+ "chunked can not be set if Content-Length header is set"
+ )
+
+ self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
+
+ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
+ """Set basic auth."""
+ if auth is None:
+ auth = self.auth
+ if auth is None:
+ return
+
+ if not isinstance(auth, helpers.BasicAuth):
+ raise TypeError("BasicAuth() tuple is required instead")
+
+ self.headers[hdrs.AUTHORIZATION] = auth.encode()
+
+ def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None:
+ """Update request body from data."""
+ if self._body is not None:
+ _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel)
+
+ if body is None:
+ self._body = None
+ # Set Content-Length to 0 when body is None for methods that expect a body
+ if (
+ self.method not in self.GET_METHODS
+ and not self.chunked
+ and hdrs.CONTENT_LENGTH not in self.headers
+ ):
+ self.headers[hdrs.CONTENT_LENGTH] = "0"
+ return
+
+ # FormData
+ maybe_payload = body() if isinstance(body, FormData) else body
+
+ try:
+ body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None)
+ except payload.LookupError:
+ body_payload = FormData(maybe_payload)() # type: ignore[arg-type]
+
+ self._body = body_payload
+ # enable chunked encoding if needed
+ if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
+ if (size := body_payload.size) is not None:
+ self.headers[hdrs.CONTENT_LENGTH] = str(size)
+ else:
+ self.chunked = True
+
+ # copy payload headers
+ assert body_payload.headers
+ headers = self.headers
+ skip_headers = self._skip_auto_headers
+ for key, value in body_payload.headers.items():
+ if key in headers or (skip_headers is not None and key in skip_headers):
+ continue
+ headers[key] = value
+
+ def _update_body(self, body: Any) -> None:
+ """Update request body after its already been set."""
+ # Remove existing Content-Length header since body is changing
+ if hdrs.CONTENT_LENGTH in self.headers:
+ del self.headers[hdrs.CONTENT_LENGTH]
+
+ # Remove existing Transfer-Encoding header to avoid conflicts
+ if self.chunked and hdrs.TRANSFER_ENCODING in self.headers:
+ del self.headers[hdrs.TRANSFER_ENCODING]
+
+ # Now update the body using the existing method
+ # Called from _update_body, add 1 to stacklevel from caller
+ self.update_body_from_data(body, _stacklevel=4)
+
+ # Update transfer encoding headers if needed (same logic as __init__)
+ if body is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
+
+ async def update_body(self, body: Any) -> None:
+ """
+ Update request body and close previous payload if needed.
+
+ This method safely updates the request body by first closing any existing
+ payload to prevent resource leaks, then setting the new body.
+
+ IMPORTANT: Always use this method instead of setting request.body directly.
+ Direct assignment to request.body will leak resources if the previous body
+ contains file handles, streams, or other resources that need cleanup.
+
+ Args:
+ body: The new body content. Can be:
+ - bytes/bytearray: Raw binary data
+ - str: Text data (will be encoded using charset from Content-Type)
+ - FormData: Form data that will be encoded as multipart/form-data
+ - Payload: A pre-configured payload object
+ - AsyncIterable: An async iterable of bytes chunks
+ - File-like object: Will be read and sent as binary data
+ - None: Clears the body
+
+ Usage:
+ # CORRECT: Use update_body
+ await request.update_body(b"new request data")
+
+ # WRONG: Don't set body directly
+ # request.body = b"new request data" # This will leak resources!
+
+ # Update with form data
+ form_data = FormData()
+ form_data.add_field('field', 'value')
+ await request.update_body(form_data)
+
+ # Clear body
+ await request.update_body(None)
+
+ Note:
+ This method is async because it may need to close file handles or
+ other resources associated with the previous payload. Always await
+ this method to ensure proper cleanup.
+
+ Warning:
+ Setting request.body directly is highly discouraged and can lead to:
+ - Resource leaks (unclosed file handles, streams)
+ - Memory leaks (unreleased buffers)
+ - Unexpected behavior with streaming payloads
+
+ It is not recommended to change the payload type in middleware. If the
+ body was already set (e.g., as bytes), it's best to keep the same type
+ rather than converting it (e.g., to str) as this may result in unexpected
+ behavior.
+
+ See Also:
+ - update_body_from_data: Synchronous body update without cleanup
+ - body property: Direct body access (STRONGLY DISCOURAGED)
+
+ """
+ # Close existing payload if it exists and needs closing
+ if self._body is not None:
+ await self._body.close()
+ self._update_body(body)
+
+ def update_expect_continue(self, expect: bool = False) -> None:
+ if expect:
+ self.headers[hdrs.EXPECT] = "100-continue"
+ elif (
+ hdrs.EXPECT in self.headers
+ and self.headers[hdrs.EXPECT].lower() == "100-continue"
+ ):
+ expect = True
+
+ if expect:
+ self._continue = self.loop.create_future()
+
+ def update_proxy(
+ self,
+ proxy: Optional[URL],
+ proxy_auth: Optional[BasicAuth],
+ proxy_headers: Optional[LooseHeaders],
+ ) -> None:
+ self.proxy = proxy
+ if proxy is None:
+ self.proxy_auth = None
+ self.proxy_headers = None
+ return
+
+ if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
+ raise ValueError("proxy_auth must be None or BasicAuth() tuple")
+ self.proxy_auth = proxy_auth
+
+ if proxy_headers is not None and not isinstance(
+ proxy_headers, (MultiDict, MultiDictProxy)
+ ):
+ proxy_headers = CIMultiDict(proxy_headers)
+ self.proxy_headers = proxy_headers
+
+ async def write_bytes(
+ self,
+ writer: AbstractStreamWriter,
+ conn: "Connection",
+ content_length: Optional[int] = None,
+ ) -> None:
+ """
+ Write the request body to the connection stream.
+
+ This method handles writing different types of request bodies:
+ 1. Payload objects (using their specialized write_with_length method)
+ 2. Bytes/bytearray objects
+ 3. Iterable body content
+
+ Args:
+ writer: The stream writer to write the body to
+ conn: The connection being used for this request
+ content_length: Optional maximum number of bytes to write from the body
+ (None means write the entire body)
+
+ The method properly handles:
+ - Waiting for 100-Continue responses if required
+ - Content length constraints for chunked encoding
+ - Error handling for network issues, cancellation, and other exceptions
+ - Signaling EOF and timeout management
+
+ Raises:
+ ClientOSError: When there's an OS-level error writing the body
+ ClientConnectionError: When there's a general connection error
+ asyncio.CancelledError: When the operation is cancelled
+
+ """
+ # 100 response
+ if self._continue is not None:
+ # Force headers to be sent before waiting for 100-continue
+ writer.send_headers()
+ await writer.drain()
+ await self._continue
+
+ protocol = conn.protocol
+ assert protocol is not None
+ try:
+ # This should be a rare case but the
+ # self._body can be set to None while
+ # the task is being started or we wait above
+ # for the 100-continue response.
+ # The more likely case is we have an empty
+ # payload, but 100-continue is still expected.
+ if self._body is not None:
+ await self._body.write_with_length(writer, content_length)
+ except OSError as underlying_exc:
+ reraised_exc = underlying_exc
+
+ # Distinguish between timeout and other OS errors for better error reporting
+ exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
+ underlying_exc, asyncio.TimeoutError
+ )
+ if exc_is_not_timeout:
+ reraised_exc = ClientOSError(
+ underlying_exc.errno,
+ f"Can not write request body for {self.url !s}",
+ )
+
+ set_exception(protocol, reraised_exc, underlying_exc)
+ except asyncio.CancelledError:
+ # Body hasn't been fully sent, so connection can't be reused
+ conn.close()
+ raise
+ except Exception as underlying_exc:
+ set_exception(
+ protocol,
+ ClientConnectionError(
+ "Failed to send bytes into the underlying connection "
+ f"{conn !s}: {underlying_exc!r}",
+ ),
+ underlying_exc,
+ )
+ else:
+ # Successfully wrote the body, signal EOF and start response timeout
+ await writer.write_eof()
+ protocol.start_timeout()
+
+ async def send(self, conn: "Connection") -> "ClientResponse":
+ # Specify request target:
+ # - CONNECT request must send authority form URI
+ # - not CONNECT proxy must send absolute form URI
+ # - most common is origin form URI
+ if self.method == hdrs.METH_CONNECT:
+ connect_host = self.url.host_subcomponent
+ assert connect_host is not None
+ path = f"{connect_host}:{self.url.port}"
+ elif self.proxy and not self.is_ssl():
+ path = str(self.url)
+ else:
+ path = self.url.raw_path_qs
+
+ protocol = conn.protocol
+ assert protocol is not None
+ writer = StreamWriter(
+ protocol,
+ self.loop,
+ on_chunk_sent=(
+ functools.partial(self._on_chunk_request_sent, self.method, self.url)
+ if self._traces
+ else None
+ ),
+ on_headers_sent=(
+ functools.partial(self._on_headers_request_sent, self.method, self.url)
+ if self._traces
+ else None
+ ),
+ )
+
+ if self.compress:
+ writer.enable_compression(self.compress) # type: ignore[arg-type]
+
+ if self.chunked is not None:
+ writer.enable_chunking()
+
+ # set default content-type
+ if (
+ self.method in self.POST_METHODS
+ and (
+ self._skip_auto_headers is None
+ or hdrs.CONTENT_TYPE not in self._skip_auto_headers
+ )
+ and hdrs.CONTENT_TYPE not in self.headers
+ ):
+ self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
+
+ v = self.version
+ if hdrs.CONNECTION not in self.headers:
+ if conn._connector.force_close:
+ if v == HttpVersion11:
+ self.headers[hdrs.CONNECTION] = "close"
+ elif v == HttpVersion10:
+ self.headers[hdrs.CONNECTION] = "keep-alive"
+
+ # status + headers
+ status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
+
+ # Buffer headers for potential coalescing with body
+ await writer.write_headers(status_line, self.headers)
+
+ task: Optional["asyncio.Task[None]"]
+ if self._body or self._continue is not None or protocol.writing_paused:
+ coro = self.write_bytes(writer, conn, self._get_content_length())
+ if sys.version_info >= (3, 12):
+ # Optimization for Python 3.12, try to write
+ # bytes immediately to avoid having to schedule
+ # the task on the event loop.
+ task = asyncio.Task(coro, loop=self.loop, eager_start=True)
+ else:
+ task = self.loop.create_task(coro)
+ if task.done():
+ task = None
+ else:
+ self._writer = task
+ else:
+ # We have nothing to write because
+ # - there is no body
+ # - the protocol does not have writing paused
+ # - we are not waiting for a 100-continue response
+ protocol.start_timeout()
+ writer.set_eof()
+ task = None
+ response_class = self.response_class
+ assert response_class is not None
+ self.response = response_class(
+ self.method,
+ self.original_url,
+ writer=task,
+ continue100=self._continue,
+ timer=self._timer,
+ request_info=self.request_info,
+ traces=self._traces,
+ loop=self.loop,
+ session=self._session,
+ )
+ return self.response
+
+ async def close(self) -> None:
+ if self.__writer is not None:
+ try:
+ await self.__writer
+ except asyncio.CancelledError:
+ if (
+ sys.version_info >= (3, 11)
+ and (task := asyncio.current_task())
+ and task.cancelling()
+ ):
+ raise
+
+ def terminate(self) -> None:
+ if self.__writer is not None:
+ if not self.loop.is_closed():
+ self.__writer.cancel()
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = None
+
+ async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
+ for trace in self._traces:
+ await trace.send_request_chunk_sent(method, url, chunk)
+
+ async def _on_headers_request_sent(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+ for trace in self._traces:
+ await trace.send_request_headers(method, url, headers)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_ws.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_ws.py"
new file mode 100644
index 0000000..daa57d1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/client_ws.py"
@@ -0,0 +1,428 @@
+"""WebSocket client for asyncio."""
+
+import asyncio
+import sys
+from types import TracebackType
+from typing import Any, Optional, Type, cast
+
+import attr
+
+from ._websocket.reader import WebSocketDataQueue
+from .client_exceptions import ClientError, ServerTimeoutError, WSMessageTypeError
+from .client_reqrep import ClientResponse
+from .helpers import calculate_timeout_when, set_result
+from .http import (
+ WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE,
+ WebSocketError,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType,
+)
+from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter
+from .streams import EofStream
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ DEFAULT_JSON_ENCODER,
+ JSONDecoder,
+ JSONEncoder,
+)
+
+if sys.version_info >= (3, 11):
+ import asyncio as async_timeout
+else:
+ import async_timeout
+
+
+@attr.s(frozen=True, slots=True)
+class ClientWSTimeout:
+ ws_receive = attr.ib(type=Optional[float], default=None)
+ ws_close = attr.ib(type=Optional[float], default=None)
+
+
+DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0)
+
+
+class ClientWebSocketResponse:
+ def __init__(
+ self,
+ reader: WebSocketDataQueue,
+ writer: WebSocketWriter,
+ protocol: Optional[str],
+ response: ClientResponse,
+ timeout: ClientWSTimeout,
+ autoclose: bool,
+ autoping: bool,
+ loop: asyncio.AbstractEventLoop,
+ *,
+ heartbeat: Optional[float] = None,
+ compress: int = 0,
+ client_notakeover: bool = False,
+ ) -> None:
+ self._response = response
+ self._conn = response.connection
+
+ self._writer = writer
+ self._reader = reader
+ self._protocol = protocol
+ self._closed = False
+ self._closing = False
+ self._close_code: Optional[int] = None
+ self._timeout = timeout
+ self._autoclose = autoclose
+ self._autoping = autoping
+ self._heartbeat = heartbeat
+ self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
+ self._heartbeat_when: float = 0.0
+ if heartbeat is not None:
+ self._pong_heartbeat = heartbeat / 2.0
+ self._pong_response_cb: Optional[asyncio.TimerHandle] = None
+ self._loop = loop
+ self._waiting: bool = False
+ self._close_wait: Optional[asyncio.Future[None]] = None
+ self._exception: Optional[BaseException] = None
+ self._compress = compress
+ self._client_notakeover = client_notakeover
+ self._ping_task: Optional[asyncio.Task[None]] = None
+
+ self._reset_heartbeat()
+
+ def _cancel_heartbeat(self) -> None:
+ self._cancel_pong_response_cb()
+ if self._heartbeat_cb is not None:
+ self._heartbeat_cb.cancel()
+ self._heartbeat_cb = None
+ if self._ping_task is not None:
+ self._ping_task.cancel()
+ self._ping_task = None
+
+ def _cancel_pong_response_cb(self) -> None:
+ if self._pong_response_cb is not None:
+ self._pong_response_cb.cancel()
+ self._pong_response_cb = None
+
+ def _reset_heartbeat(self) -> None:
+ if self._heartbeat is None:
+ return
+ self._cancel_pong_response_cb()
+ loop = self._loop
+ assert loop is not None
+ conn = self._conn
+ timeout_ceil_threshold = (
+ conn._connector._timeout_ceil_threshold if conn is not None else 5
+ )
+ now = loop.time()
+ when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold)
+ self._heartbeat_when = when
+ if self._heartbeat_cb is None:
+ # We do not cancel the previous heartbeat_cb here because
+ # it generates a significant amount of TimerHandle churn
+ # which causes asyncio to rebuild the heap frequently.
+ # Instead _send_heartbeat() will reschedule the next
+ # heartbeat if it fires too early.
+ self._heartbeat_cb = loop.call_at(when, self._send_heartbeat)
+
+ def _send_heartbeat(self) -> None:
+ self._heartbeat_cb = None
+ loop = self._loop
+ now = loop.time()
+ if now < self._heartbeat_when:
+ # Heartbeat fired too early, reschedule
+ self._heartbeat_cb = loop.call_at(
+ self._heartbeat_when, self._send_heartbeat
+ )
+ return
+
+ conn = self._conn
+ timeout_ceil_threshold = (
+ conn._connector._timeout_ceil_threshold if conn is not None else 5
+ )
+ when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold)
+ self._cancel_pong_response_cb()
+ self._pong_response_cb = loop.call_at(when, self._pong_not_received)
+
+ coro = self._writer.send_frame(b"", WSMsgType.PING)
+ if sys.version_info >= (3, 12):
+ # Optimization for Python 3.12, try to send the ping
+ # immediately to avoid having to schedule
+ # the task on the event loop.
+ ping_task = asyncio.Task(coro, loop=loop, eager_start=True)
+ else:
+ ping_task = loop.create_task(coro)
+
+ if not ping_task.done():
+ self._ping_task = ping_task
+ ping_task.add_done_callback(self._ping_task_done)
+ else:
+ self._ping_task_done(ping_task)
+
+ def _ping_task_done(self, task: "asyncio.Task[None]") -> None:
+ """Callback for when the ping task completes."""
+ if not task.cancelled() and (exc := task.exception()):
+ self._handle_ping_pong_exception(exc)
+ self._ping_task = None
+
+ def _pong_not_received(self) -> None:
+ self._handle_ping_pong_exception(
+ ServerTimeoutError(f"No PONG received after {self._pong_heartbeat} seconds")
+ )
+
+ def _handle_ping_pong_exception(self, exc: BaseException) -> None:
+ """Handle exceptions raised during ping/pong processing."""
+ if self._closed:
+ return
+ self._set_closed()
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = exc
+ self._response.close()
+ if self._waiting and not self._closing:
+ self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0)
+
+ def _set_closed(self) -> None:
+ """Set the connection to closed.
+
+ Cancel any heartbeat timers and set the closed flag.
+ """
+ self._closed = True
+ self._cancel_heartbeat()
+
+ def _set_closing(self) -> None:
+ """Set the connection to closing.
+
+ Cancel any heartbeat timers and set the closing flag.
+ """
+ self._closing = True
+ self._cancel_heartbeat()
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ @property
+ def close_code(self) -> Optional[int]:
+ return self._close_code
+
+ @property
+ def protocol(self) -> Optional[str]:
+ return self._protocol
+
+ @property
+ def compress(self) -> int:
+ return self._compress
+
+ @property
+ def client_notakeover(self) -> bool:
+ return self._client_notakeover
+
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
+ """extra info from connection transport"""
+ conn = self._response.connection
+ if conn is None:
+ return default
+ transport = conn.transport
+ if transport is None:
+ return default
+ return transport.get_extra_info(name, default)
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ async def ping(self, message: bytes = b"") -> None:
+ await self._writer.send_frame(message, WSMsgType.PING)
+
+ async def pong(self, message: bytes = b"") -> None:
+ await self._writer.send_frame(message, WSMsgType.PONG)
+
+ async def send_frame(
+ self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None
+ ) -> None:
+ """Send a frame over the websocket."""
+ await self._writer.send_frame(message, opcode, compress)
+
+ async def send_str(self, data: str, compress: Optional[int] = None) -> None:
+ if not isinstance(data, str):
+ raise TypeError("data argument must be str (%r)" % type(data))
+ await self._writer.send_frame(
+ data.encode("utf-8"), WSMsgType.TEXT, compress=compress
+ )
+
+ async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
+ if not isinstance(data, (bytes, bytearray, memoryview)):
+ raise TypeError("data argument must be byte-ish (%r)" % type(data))
+ await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress)
+
+ async def send_json(
+ self,
+ data: Any,
+ compress: Optional[int] = None,
+ *,
+ dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
+ ) -> None:
+ await self.send_str(dumps(data), compress=compress)
+
+ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
+ # we need to break `receive()` cycle first,
+ # `close()` may be called from different task
+ if self._waiting and not self._closing:
+ assert self._loop is not None
+ self._close_wait = self._loop.create_future()
+ self._set_closing()
+ self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
+ await self._close_wait
+
+ if self._closed:
+ return False
+
+ self._set_closed()
+ try:
+ await self._writer.close(code, message)
+ except asyncio.CancelledError:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._response.close()
+ raise
+ except Exception as exc:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = exc
+ self._response.close()
+ return True
+
+ if self._close_code:
+ self._response.close()
+ return True
+
+ while True:
+ try:
+ async with async_timeout.timeout(self._timeout.ws_close):
+ msg = await self._reader.read()
+ except asyncio.CancelledError:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._response.close()
+ raise
+ except Exception as exc:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = exc
+ self._response.close()
+ return True
+
+ if msg.type is WSMsgType.CLOSE:
+ self._close_code = msg.data
+ self._response.close()
+ return True
+
+ async def receive(self, timeout: Optional[float] = None) -> WSMessage:
+ receive_timeout = timeout or self._timeout.ws_receive
+
+ while True:
+ if self._waiting:
+ raise RuntimeError("Concurrent call to receive() is not allowed")
+
+ if self._closed:
+ return WS_CLOSED_MESSAGE
+ elif self._closing:
+ await self.close()
+ return WS_CLOSED_MESSAGE
+
+ try:
+ self._waiting = True
+ try:
+ if receive_timeout:
+ # Entering the context manager and creating
+ # Timeout() object can take almost 50% of the
+ # run time in this loop so we avoid it if
+ # there is no read timeout.
+ async with async_timeout.timeout(receive_timeout):
+ msg = await self._reader.read()
+ else:
+ msg = await self._reader.read()
+ self._reset_heartbeat()
+ finally:
+ self._waiting = False
+ if self._close_wait:
+ set_result(self._close_wait, None)
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ raise
+ except EofStream:
+ self._close_code = WSCloseCode.OK
+ await self.close()
+ return WSMessage(WSMsgType.CLOSED, None, None)
+ except ClientError:
+ # Likely ServerDisconnectedError when connection is lost
+ self._set_closed()
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ return WS_CLOSED_MESSAGE
+ except WebSocketError as exc:
+ self._close_code = exc.code
+ await self.close(code=exc.code)
+ return WSMessage(WSMsgType.ERROR, exc, None)
+ except Exception as exc:
+ self._exception = exc
+ self._set_closing()
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ await self.close()
+ return WSMessage(WSMsgType.ERROR, exc, None)
+
+ if msg.type not in _INTERNAL_RECEIVE_TYPES:
+ # If its not a close/closing/ping/pong message
+ # we can return it immediately
+ return msg
+
+ if msg.type is WSMsgType.CLOSE:
+ self._set_closing()
+ self._close_code = msg.data
+ if not self._closed and self._autoclose:
+ await self.close()
+ elif msg.type is WSMsgType.CLOSING:
+ self._set_closing()
+ elif msg.type is WSMsgType.PING and self._autoping:
+ await self.pong(msg.data)
+ continue
+ elif msg.type is WSMsgType.PONG and self._autoping:
+ continue
+
+ return msg
+
+ async def receive_str(self, *, timeout: Optional[float] = None) -> str:
+ msg = await self.receive(timeout)
+ if msg.type is not WSMsgType.TEXT:
+ raise WSMessageTypeError(
+ f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT"
+ )
+ return cast(str, msg.data)
+
+ async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
+ msg = await self.receive(timeout)
+ if msg.type is not WSMsgType.BINARY:
+ raise WSMessageTypeError(
+ f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY"
+ )
+ return cast(bytes, msg.data)
+
+ async def receive_json(
+ self,
+ *,
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
+ timeout: Optional[float] = None,
+ ) -> Any:
+ data = await self.receive_str(timeout=timeout)
+ return loads(data)
+
+ def __aiter__(self) -> "ClientWebSocketResponse":
+ return self
+
+ async def __anext__(self) -> WSMessage:
+ msg = await self.receive()
+ if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
+ raise StopAsyncIteration
+ return msg
+
+ async def __aenter__(self) -> "ClientWebSocketResponse":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ await self.close()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/compression_utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/compression_utils.py"
new file mode 100644
index 0000000..e478d24
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/compression_utils.py"
@@ -0,0 +1,348 @@
+import asyncio
+import sys
+import zlib
+from abc import ABC, abstractmethod
+from concurrent.futures import Executor
+from typing import Any, Final, Optional, Protocol, TypedDict, cast
+
+if sys.version_info >= (3, 12):
+ from collections.abc import Buffer
+else:
+ from typing import Union
+
+ Buffer = Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
+
+try:
+ try:
+ import brotlicffi as brotli
+ except ImportError:
+ import brotli
+
+ HAS_BROTLI = True
+except ImportError: # pragma: no cover
+ HAS_BROTLI = False
+
+try:
+ if sys.version_info >= (3, 14):
+ from compression.zstd import ZstdDecompressor # noqa: I900
+ else: # TODO(PY314): Remove mentions of backports.zstd across codebase
+ from backports.zstd import ZstdDecompressor
+
+ HAS_ZSTD = True
+except ImportError:
+ HAS_ZSTD = False
+
+
+MAX_SYNC_CHUNK_SIZE = 4096
+DEFAULT_MAX_DECOMPRESS_SIZE = 2**25 # 32MiB
+
+# Unlimited decompression constants - different libraries use different conventions
+ZLIB_MAX_LENGTH_UNLIMITED = 0 # zlib uses 0 to mean unlimited
+ZSTD_MAX_LENGTH_UNLIMITED = -1 # zstd uses -1 to mean unlimited
+
+
+class ZLibCompressObjProtocol(Protocol):
+ def compress(self, data: Buffer) -> bytes: ...
+ def flush(self, mode: int = ..., /) -> bytes: ...
+
+
+class ZLibDecompressObjProtocol(Protocol):
+ def decompress(self, data: Buffer, max_length: int = ...) -> bytes: ...
+ def flush(self, length: int = ..., /) -> bytes: ...
+
+ @property
+ def eof(self) -> bool: ...
+
+
+class ZLibBackendProtocol(Protocol):
+ MAX_WBITS: int
+ Z_FULL_FLUSH: int
+ Z_SYNC_FLUSH: int
+ Z_BEST_SPEED: int
+ Z_FINISH: int
+
+ def compressobj(
+ self,
+ level: int = ...,
+ method: int = ...,
+ wbits: int = ...,
+ memLevel: int = ...,
+ strategy: int = ...,
+ zdict: Optional[Buffer] = ...,
+ ) -> ZLibCompressObjProtocol: ...
+ def decompressobj(
+ self, wbits: int = ..., zdict: Buffer = ...
+ ) -> ZLibDecompressObjProtocol: ...
+
+ def compress(
+ self, data: Buffer, /, level: int = ..., wbits: int = ...
+ ) -> bytes: ...
+ def decompress(
+ self, data: Buffer, /, wbits: int = ..., bufsize: int = ...
+ ) -> bytes: ...
+
+
+class CompressObjArgs(TypedDict, total=False):
+ wbits: int
+ strategy: int
+ level: int
+
+
+class ZLibBackendWrapper:
+ def __init__(self, _zlib_backend: ZLibBackendProtocol):
+ self._zlib_backend: ZLibBackendProtocol = _zlib_backend
+
+ @property
+ def name(self) -> str:
+ return getattr(self._zlib_backend, "__name__", "undefined")
+
+ @property
+ def MAX_WBITS(self) -> int:
+ return self._zlib_backend.MAX_WBITS
+
+ @property
+ def Z_FULL_FLUSH(self) -> int:
+ return self._zlib_backend.Z_FULL_FLUSH
+
+ @property
+ def Z_SYNC_FLUSH(self) -> int:
+ return self._zlib_backend.Z_SYNC_FLUSH
+
+ @property
+ def Z_BEST_SPEED(self) -> int:
+ return self._zlib_backend.Z_BEST_SPEED
+
+ @property
+ def Z_FINISH(self) -> int:
+ return self._zlib_backend.Z_FINISH
+
+ def compressobj(self, *args: Any, **kwargs: Any) -> ZLibCompressObjProtocol:
+ return self._zlib_backend.compressobj(*args, **kwargs)
+
+ def decompressobj(self, *args: Any, **kwargs: Any) -> ZLibDecompressObjProtocol:
+ return self._zlib_backend.decompressobj(*args, **kwargs)
+
+ def compress(self, data: Buffer, *args: Any, **kwargs: Any) -> bytes:
+ return self._zlib_backend.compress(data, *args, **kwargs)
+
+ def decompress(self, data: Buffer, *args: Any, **kwargs: Any) -> bytes:
+ return self._zlib_backend.decompress(data, *args, **kwargs)
+
+ # Everything not explicitly listed in the Protocol we just pass through
+ def __getattr__(self, attrname: str) -> Any:
+ return getattr(self._zlib_backend, attrname)
+
+
+ZLibBackend: ZLibBackendWrapper = ZLibBackendWrapper(zlib)
+
+
+def set_zlib_backend(new_zlib_backend: ZLibBackendProtocol) -> None:
+ ZLibBackend._zlib_backend = new_zlib_backend
+
+
+def encoding_to_mode(
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+) -> int:
+ if encoding == "gzip":
+ return 16 + ZLibBackend.MAX_WBITS
+
+ return -ZLibBackend.MAX_WBITS if suppress_deflate_header else ZLibBackend.MAX_WBITS
+
+
+class DecompressionBaseHandler(ABC):
+ def __init__(
+ self,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ """Base class for decompression handlers."""
+ self._executor = executor
+ self._max_sync_chunk_size = max_sync_chunk_size
+
+ @abstractmethod
+ def decompress_sync(
+ self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+ ) -> bytes:
+ """Decompress the given data."""
+
+ async def decompress(
+ self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+ ) -> bytes:
+ """Decompress the given data."""
+ if (
+ self._max_sync_chunk_size is not None
+ and len(data) > self._max_sync_chunk_size
+ ):
+ return await asyncio.get_event_loop().run_in_executor(
+ self._executor, self.decompress_sync, data, max_length
+ )
+ return self.decompress_sync(data, max_length)
+
+
+class ZLibCompressor:
+ def __init__(
+ self,
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+ level: Optional[int] = None,
+ wbits: Optional[int] = None,
+ strategy: Optional[int] = None,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ self._executor = executor
+ self._max_sync_chunk_size = max_sync_chunk_size
+ self._mode = (
+ encoding_to_mode(encoding, suppress_deflate_header)
+ if wbits is None
+ else wbits
+ )
+ self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend)
+
+ kwargs: CompressObjArgs = {}
+ kwargs["wbits"] = self._mode
+ if strategy is not None:
+ kwargs["strategy"] = strategy
+ if level is not None:
+ kwargs["level"] = level
+ self._compressor = self._zlib_backend.compressobj(**kwargs)
+
+ def compress_sync(self, data: bytes) -> bytes:
+ return self._compressor.compress(data)
+
+ async def compress(self, data: bytes) -> bytes:
+ """Compress the data and returned the compressed bytes.
+
+ Note that flush() must be called after the last call to compress()
+
+ If the data size is large than the max_sync_chunk_size, the compression
+ will be done in the executor. Otherwise, the compression will be done
+ in the event loop.
+
+ **WARNING: This method is NOT cancellation-safe when used with flush().**
+ If this operation is cancelled, the compressor state may be corrupted.
+ The connection MUST be closed after cancellation to avoid data corruption
+ in subsequent compress operations.
+
+ For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap
+ compress() + flush() + send operations in a shield and lock to ensure atomicity.
+ """
+ # For large payloads, offload compression to executor to avoid blocking event loop
+ should_use_executor = (
+ self._max_sync_chunk_size is not None
+ and len(data) > self._max_sync_chunk_size
+ )
+ if should_use_executor:
+ return await asyncio.get_running_loop().run_in_executor(
+ self._executor, self._compressor.compress, data
+ )
+ return self.compress_sync(data)
+
+ def flush(self, mode: Optional[int] = None) -> bytes:
+ """Flush the compressor synchronously.
+
+ **WARNING: This method is NOT cancellation-safe when called after compress().**
+ The flush() operation accesses shared compressor state. If compress() was
+ cancelled, calling flush() may result in corrupted data. The connection MUST
+ be closed after compress() cancellation.
+
+ For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap
+ compress() + flush() + send operations in a shield and lock to ensure atomicity.
+ """
+ return self._compressor.flush(
+ mode if mode is not None else self._zlib_backend.Z_FINISH
+ )
+
+
+class ZLibDecompressor(DecompressionBaseHandler):
+ def __init__(
+ self,
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size)
+ self._mode = encoding_to_mode(encoding, suppress_deflate_header)
+ self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend)
+ self._decompressor = self._zlib_backend.decompressobj(wbits=self._mode)
+
+ def decompress_sync(
+ self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+ ) -> bytes:
+ return self._decompressor.decompress(data, max_length)
+
+ def flush(self, length: int = 0) -> bytes:
+ return (
+ self._decompressor.flush(length)
+ if length > 0
+ else self._decompressor.flush()
+ )
+
+ @property
+ def eof(self) -> bool:
+ return self._decompressor.eof
+
+
+class BrotliDecompressor(DecompressionBaseHandler):
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(
+ self,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ) -> None:
+ """Decompress data using the Brotli library."""
+ if not HAS_BROTLI:
+ raise RuntimeError(
+ "The brotli decompression is not available. "
+ "Please install `Brotli` module"
+ )
+ self._obj = brotli.Decompressor()
+ super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size)
+
+ def decompress_sync(
+ self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+ ) -> bytes:
+ """Decompress the given data."""
+ if hasattr(self._obj, "decompress"):
+ return cast(bytes, self._obj.decompress(data, max_length))
+ return cast(bytes, self._obj.process(data, max_length))
+
+ def flush(self) -> bytes:
+ """Flush the decompressor."""
+ if hasattr(self._obj, "flush"):
+ return cast(bytes, self._obj.flush())
+ return b""
+
+
+class ZSTDDecompressor(DecompressionBaseHandler):
+ def __init__(
+ self,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ) -> None:
+ if not HAS_ZSTD:
+ raise RuntimeError(
+ "The zstd decompression is not available. "
+ "Please install `backports.zstd` module"
+ )
+ self._obj = ZstdDecompressor()
+ super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size)
+
+ def decompress_sync(
+ self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED
+ ) -> bytes:
+ # zstd uses -1 for unlimited, while zlib uses 0 for unlimited
+ # Convert the zlib convention (0=unlimited) to zstd convention (-1=unlimited)
+ zstd_max_length = (
+ ZSTD_MAX_LENGTH_UNLIMITED
+ if max_length == ZLIB_MAX_LENGTH_UNLIMITED
+ else max_length
+ )
+ return self._obj.decompress(data, zstd_max_length)
+
+ def flush(self) -> bytes:
+ return b""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/connector.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/connector.py"
new file mode 100644
index 0000000..290a424
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/connector.py"
@@ -0,0 +1,1842 @@
+import asyncio
+import functools
+import random
+import socket
+import sys
+import traceback
+import warnings
+from collections import OrderedDict, defaultdict, deque
+from contextlib import suppress
+from http import HTTPStatus
+from itertools import chain, cycle, islice
+from time import monotonic
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ DefaultDict,
+ Deque,
+ Dict,
+ Iterator,
+ List,
+ Literal,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import aiohappyeyeballs
+from aiohappyeyeballs import AddrInfoType, SocketFactoryType
+
+from . import hdrs, helpers
+from .abc import AbstractResolver, ResolveResult
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientConnectorCertificateError,
+ ClientConnectorDNSError,
+ ClientConnectorError,
+ ClientConnectorSSLError,
+ ClientHttpProxyError,
+ ClientProxyConnectionError,
+ ServerFingerprintMismatch,
+ UnixClientConnectorError,
+ cert_errors,
+ ssl_errors,
+)
+from .client_proto import ResponseHandler
+from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
+from .helpers import (
+ _SENTINEL,
+ ceil_timeout,
+ is_ip_address,
+ noop,
+ sentinel,
+ set_exception,
+ set_result,
+)
+from .log import client_logger
+from .resolver import DefaultResolver
+
+if sys.version_info >= (3, 12):
+ from collections.abc import Buffer
+else:
+ Buffer = Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
+
+if TYPE_CHECKING:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+else:
+ try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+ except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+EMPTY_SCHEMA_SET = frozenset({""})
+HTTP_SCHEMA_SET = frozenset({"http", "https"})
+WS_SCHEMA_SET = frozenset({"ws", "wss"})
+
+HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET
+HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET
+
+NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < (
+ 3,
+ 13,
+ 1,
+) or sys.version_info < (3, 12, 7)
+# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960
+# which first appeared in Python 3.12.7 and 3.13.1
+
+
+__all__ = (
+ "BaseConnector",
+ "TCPConnector",
+ "UnixConnector",
+ "NamedPipeConnector",
+ "AddrInfoType",
+ "SocketFactoryType",
+)
+
+
+if TYPE_CHECKING:
+ from .client import ClientTimeout
+ from .client_reqrep import ConnectionKey
+ from .tracing import Trace
+
+
+class _DeprecationWaiter:
+ __slots__ = ("_awaitable", "_awaited")
+
+ def __init__(self, awaitable: Awaitable[Any]) -> None:
+ self._awaitable = awaitable
+ self._awaited = False
+
+ def __await__(self) -> Any:
+ self._awaited = True
+ return self._awaitable.__await__()
+
+ def __del__(self) -> None:
+ if not self._awaited:
+ warnings.warn(
+ "Connector.close() is a coroutine, "
+ "please use await connector.close()",
+ DeprecationWarning,
+ )
+
+
+async def _wait_for_close(waiters: List[Awaitable[object]]) -> None:
+ """Wait for all waiters to finish closing."""
+ results = await asyncio.gather(*waiters, return_exceptions=True)
+ for res in results:
+ if isinstance(res, Exception):
+ client_logger.debug("Error while closing connector: %r", res)
+
+
+class Connection:
+
+ _source_traceback = None
+
+ def __init__(
+ self,
+ connector: "BaseConnector",
+ key: "ConnectionKey",
+ protocol: ResponseHandler,
+ loop: asyncio.AbstractEventLoop,
+ ) -> None:
+ self._key = key
+ self._connector = connector
+ self._loop = loop
+ self._protocol: Optional[ResponseHandler] = protocol
+ self._callbacks: List[Callable[[], None]] = []
+
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ def __repr__(self) -> str:
+ return f"Connection<{self._key}>"
+
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._protocol is not None:
+ kwargs = {"source": self}
+ _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
+ if self._loop.is_closed():
+ return
+
+ self._connector._release(self._key, self._protocol, should_close=True)
+
+ context = {"client_connection": self, "message": "Unclosed connection"}
+ if self._source_traceback is not None:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
+
+ def __bool__(self) -> Literal[True]:
+ """Force subclasses to not be falsy, to make checks simpler."""
+ return True
+
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop:
+ warnings.warn(
+ "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
+ )
+ return self._loop
+
+ @property
+ def transport(self) -> Optional[asyncio.Transport]:
+ if self._protocol is None:
+ return None
+ return self._protocol.transport
+
+ @property
+ def protocol(self) -> Optional[ResponseHandler]:
+ return self._protocol
+
+ def add_callback(self, callback: Callable[[], None]) -> None:
+ if callback is not None:
+ self._callbacks.append(callback)
+
+ def _notify_release(self) -> None:
+ callbacks, self._callbacks = self._callbacks[:], []
+
+ for cb in callbacks:
+ with suppress(Exception):
+ cb()
+
+ def close(self) -> None:
+ self._notify_release()
+
+ if self._protocol is not None:
+ self._connector._release(self._key, self._protocol, should_close=True)
+ self._protocol = None
+
+ def release(self) -> None:
+ self._notify_release()
+
+ if self._protocol is not None:
+ self._connector._release(self._key, self._protocol)
+ self._protocol = None
+
+ @property
+ def closed(self) -> bool:
+ return self._protocol is None or not self._protocol.is_connected()
+
+
+class _ConnectTunnelConnection(Connection):
+ """Special connection wrapper for CONNECT tunnels that must never be pooled.
+
+ This connection wraps the proxy connection that will be upgraded with TLS.
+ It must never be released to the pool because:
+ 1. Its 'closed' future will never complete, causing session.close() to hang
+ 2. It represents an intermediate state, not a reusable connection
+ 3. The real connection (with TLS) will be created separately
+ """
+
+ def release(self) -> None:
+ """Do nothing - don't pool or close the connection.
+
+ These connections are an intermediate state during the CONNECT tunnel
+ setup and will be cleaned up naturally after the TLS upgrade. If they
+ were to be pooled, they would never be properly closed, causing
+ session.close() to wait forever for their 'closed' future.
+ """
+
+
+class _TransportPlaceholder:
+ """placeholder for BaseConnector.connect function"""
+
+ __slots__ = ("closed", "transport")
+
+ def __init__(self, closed_future: asyncio.Future[Optional[Exception]]) -> None:
+ """Initialize a placeholder for a transport."""
+ self.closed = closed_future
+ self.transport = None
+
+ def close(self) -> None:
+ """Close the placeholder."""
+
+ def abort(self) -> None:
+ """Abort the placeholder (does nothing)."""
+
+
+class BaseConnector:
+ """Base connector class.
+
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
+ Disabled by default.
+ timeout_ceil_threshold - Trigger ceiling of timeout values when
+ it's above timeout_ceil_threshold.
+ loop - Optional event loop.
+ """
+
+ _closed = True # prevent AttributeError in __del__ if ctor was failed
+ _source_traceback = None
+
+ # abort transport after 2 seconds (cleanup broken connections)
+ _cleanup_closed_period = 2.0
+
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET
+
+ def __init__(
+ self,
+ *,
+ keepalive_timeout: Union[object, None, float] = sentinel,
+ force_close: bool = False,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ enable_cleanup_closed: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ timeout_ceil_threshold: float = 5,
+ ) -> None:
+
+ if force_close:
+ if keepalive_timeout is not None and keepalive_timeout is not sentinel:
+ raise ValueError(
+ "keepalive_timeout cannot be set if force_close is True"
+ )
+ else:
+ if keepalive_timeout is sentinel:
+ keepalive_timeout = 15.0
+
+ loop = loop or asyncio.get_running_loop()
+ self._timeout_ceil_threshold = timeout_ceil_threshold
+
+ self._closed = False
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ # Connection pool of reusable connections.
+ # We use a deque to store connections because it has O(1) popleft()
+ # and O(1) append() operations to implement a FIFO queue.
+ self._conns: DefaultDict[
+ ConnectionKey, Deque[Tuple[ResponseHandler, float]]
+ ] = defaultdict(deque)
+ self._limit = limit
+ self._limit_per_host = limit_per_host
+ self._acquired: Set[ResponseHandler] = set()
+ self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = (
+ defaultdict(set)
+ )
+ self._keepalive_timeout = cast(float, keepalive_timeout)
+ self._force_close = force_close
+
+ # {host_key: FIFO list of waiters}
+ # The FIFO is implemented with an OrderedDict with None keys because
+ # python does not have an ordered set.
+ self._waiters: DefaultDict[
+ ConnectionKey, OrderedDict[asyncio.Future[None], None]
+ ] = defaultdict(OrderedDict)
+
+ self._loop = loop
+ self._factory = functools.partial(ResponseHandler, loop=loop)
+
+ # start keep-alive connection cleanup task
+ self._cleanup_handle: Optional[asyncio.TimerHandle] = None
+
+ # start cleanup closed transports task
+ self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
+
+ if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED:
+ warnings.warn(
+ "enable_cleanup_closed ignored because "
+ "https://github.com/python/cpython/pull/118960 is fixed "
+ f"in Python version {sys.version_info}",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ enable_cleanup_closed = False
+
+ self._cleanup_closed_disabled = not enable_cleanup_closed
+ self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
+ self._placeholder_future: asyncio.Future[Optional[Exception]] = (
+ loop.create_future()
+ )
+ self._placeholder_future.set_result(None)
+ self._cleanup_closed()
+
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._closed:
+ return
+ if not self._conns:
+ return
+
+ conns = [repr(c) for c in self._conns.values()]
+
+ self._close()
+
+ kwargs = {"source": self}
+ _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
+ context = {
+ "connector": self,
+ "connections": conns,
+ "message": "Unclosed connector",
+ }
+ if self._source_traceback is not None:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
+
+ def __enter__(self) -> "BaseConnector":
+ warnings.warn(
+ '"with Connector():" is deprecated, '
+ 'use "async with Connector():" instead',
+ DeprecationWarning,
+ )
+ return self
+
+ def __exit__(self, *exc: Any) -> None:
+ self._close()
+
+ async def __aenter__(self) -> "BaseConnector":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]] = None,
+ exc_value: Optional[BaseException] = None,
+ exc_traceback: Optional[TracebackType] = None,
+ ) -> None:
+ await self.close()
+
+ @property
+ def force_close(self) -> bool:
+ """Ultimately close connection on releasing if True."""
+ return self._force_close
+
+ @property
+ def limit(self) -> int:
+ """The total number for simultaneous connections.
+
+ If limit is 0 the connector has no limit.
+ The default limit size is 100.
+ """
+ return self._limit
+
+ @property
+ def limit_per_host(self) -> int:
+ """The limit for simultaneous connections to the same endpoint.
+
+        Endpoints are the same if they have equal
+ (host, port, is_ssl) triple.
+ """
+ return self._limit_per_host
+
+ def _cleanup(self) -> None:
+ """Cleanup unused transports."""
+ if self._cleanup_handle:
+ self._cleanup_handle.cancel()
+ # _cleanup_handle should be unset, otherwise _release() will not
+ # recreate it ever!
+ self._cleanup_handle = None
+
+ now = monotonic()
+ timeout = self._keepalive_timeout
+
+ if self._conns:
+ connections = defaultdict(deque)
+ deadline = now - timeout
+ for key, conns in self._conns.items():
+ alive: Deque[Tuple[ResponseHandler, float]] = deque()
+ for proto, use_time in conns:
+ if proto.is_connected() and use_time - deadline >= 0:
+ alive.append((proto, use_time))
+ continue
+ transport = proto.transport
+ proto.close()
+ if not self._cleanup_closed_disabled and key.is_ssl:
+ self._cleanup_closed_transports.append(transport)
+
+ if alive:
+ connections[key] = alive
+
+ self._conns = connections
+
+ if self._conns:
+ self._cleanup_handle = helpers.weakref_handle(
+ self,
+ "_cleanup",
+ timeout,
+ self._loop,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
+ )
+
+ def _cleanup_closed(self) -> None:
+ """Double confirmation for transport close.
+
+ Some broken ssl servers may leave socket open without proper close.
+ """
+ if self._cleanup_closed_handle:
+ self._cleanup_closed_handle.cancel()
+
+ for transport in self._cleanup_closed_transports:
+ if transport is not None:
+ transport.abort()
+
+ self._cleanup_closed_transports = []
+
+ if not self._cleanup_closed_disabled:
+ self._cleanup_closed_handle = helpers.weakref_handle(
+ self,
+ "_cleanup_closed",
+ self._cleanup_closed_period,
+ self._loop,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
+ )
+
+ def close(self, *, abort_ssl: bool = False) -> Awaitable[None]:
+ """Close all opened transports.
+
+ :param abort_ssl: If True, SSL connections will be aborted immediately
+ without performing the shutdown handshake. This provides
+ faster cleanup at the cost of less graceful disconnection.
+ """
+ if not (waiters := self._close(abort_ssl=abort_ssl)):
+ # If there are no connections to close, we can return a noop
+ # awaitable to avoid scheduling a task on the event loop.
+ return _DeprecationWaiter(noop())
+ coro = _wait_for_close(waiters)
+ if sys.version_info >= (3, 12):
+ # Optimization for Python 3.12, try to close connections
+ # immediately to avoid having to schedule the task on the event loop.
+ task = asyncio.Task(coro, loop=self._loop, eager_start=True)
+ else:
+ task = self._loop.create_task(coro)
+ return _DeprecationWaiter(task)
+
+ def _close(self, *, abort_ssl: bool = False) -> List[Awaitable[object]]:
+ waiters: List[Awaitable[object]] = []
+
+ if self._closed:
+ return waiters
+
+ self._closed = True
+
+ try:
+ if self._loop.is_closed():
+ return waiters
+
+ # cancel cleanup task
+ if self._cleanup_handle:
+ self._cleanup_handle.cancel()
+
+ # cancel cleanup close task
+ if self._cleanup_closed_handle:
+ self._cleanup_closed_handle.cancel()
+
+ for data in self._conns.values():
+ for proto, _ in data:
+ if (
+ abort_ssl
+ and proto.transport
+ and proto.transport.get_extra_info("sslcontext") is not None
+ ):
+ proto.abort()
+ else:
+ proto.close()
+ if closed := proto.closed:
+ waiters.append(closed)
+
+ for proto in self._acquired:
+ if (
+ abort_ssl
+ and proto.transport
+ and proto.transport.get_extra_info("sslcontext") is not None
+ ):
+ proto.abort()
+ else:
+ proto.close()
+ if closed := proto.closed:
+ waiters.append(closed)
+
+ for transport in self._cleanup_closed_transports:
+ if transport is not None:
+ transport.abort()
+
+ return waiters
+
+ finally:
+ self._conns.clear()
+ self._acquired.clear()
+ for keyed_waiters in self._waiters.values():
+ for keyed_waiter in keyed_waiters:
+ keyed_waiter.cancel()
+ self._waiters.clear()
+ self._cleanup_handle = None
+ self._cleanup_closed_transports.clear()
+ self._cleanup_closed_handle = None
+
+ @property
+ def closed(self) -> bool:
+ """Is connector closed.
+
+ A readonly property.
+ """
+ return self._closed
+
+ def _available_connections(self, key: "ConnectionKey") -> int:
+ """
+ Return number of available connections.
+
+ The limit, limit_per_host and the connection key are taken into account.
+
+ If it returns less than 1 means that there are no connections
+ available.
+ """
+ # check total available connections
+ # If there are no limits, this will always return 1
+ total_remain = 1
+
+ if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0:
+ return total_remain
+
+ # check limit per host
+ if host_remain := self._limit_per_host:
+ if acquired := self._acquired_per_host.get(key):
+ host_remain -= len(acquired)
+ if total_remain > host_remain:
+ return host_remain
+
+ return total_remain
+
+ def _update_proxy_auth_header_and_build_proxy_req(
+ self, req: ClientRequest
+ ) -> ClientRequest:
+ """Set Proxy-Authorization header for non-SSL proxy requests and builds the proxy request for SSL proxy requests."""
+ url = req.proxy
+ assert url is not None
+ headers: Dict[str, str] = {}
+ if req.proxy_headers is not None:
+ headers = req.proxy_headers # type: ignore[assignment]
+ headers[hdrs.HOST] = req.headers[hdrs.HOST]
+ proxy_req = ClientRequest(
+ hdrs.METH_GET,
+ url,
+ headers=headers,
+ auth=req.proxy_auth,
+ loop=self._loop,
+ ssl=req.ssl,
+ )
+ auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
+ if auth is not None:
+ if not req.is_ssl():
+ req.headers[hdrs.PROXY_AUTHORIZATION] = auth
+ else:
+ proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
+ return proxy_req
+
+ async def connect(
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> Connection:
+ """Get from pool or create new connection."""
+ key = req.connection_key
+ if (conn := await self._get(key, traces)) is not None:
+ # If we do not have to wait and we can get a connection from the pool
+ # we can avoid the timeout ceil logic and directly return the connection
+ if req.proxy:
+ self._update_proxy_auth_header_and_build_proxy_req(req)
+ return conn
+
+ async with ceil_timeout(timeout.connect, timeout.ceil_threshold):
+ if self._available_connections(key) <= 0:
+ await self._wait_for_available_connection(key, traces)
+ if (conn := await self._get(key, traces)) is not None:
+ if req.proxy:
+ self._update_proxy_auth_header_and_build_proxy_req(req)
+ return conn
+
+ placeholder = cast(
+ ResponseHandler, _TransportPlaceholder(self._placeholder_future)
+ )
+ self._acquired.add(placeholder)
+ if self._limit_per_host:
+ self._acquired_per_host[key].add(placeholder)
+
+ try:
+            # Traces are done inside the try block to ensure that
+            # the placeholder is still cleaned up if an exception
+            # is raised.
+ if traces:
+ for trace in traces:
+ await trace.send_connection_create_start()
+ proto = await self._create_connection(req, traces, timeout)
+ if traces:
+ for trace in traces:
+ await trace.send_connection_create_end()
+ except BaseException:
+ self._release_acquired(key, placeholder)
+ raise
+ else:
+ if self._closed:
+ proto.close()
+ raise ClientConnectionError("Connector is closed.")
+
+ # The connection was successfully created, drop the placeholder
+ # and add the real connection to the acquired set. There should
+ # be no awaits after the proto is added to the acquired set
+ # to ensure that the connection is not left in the acquired set
+ # on cancellation.
+ self._acquired.remove(placeholder)
+ self._acquired.add(proto)
+ if self._limit_per_host:
+ acquired_per_host = self._acquired_per_host[key]
+ acquired_per_host.remove(placeholder)
+ acquired_per_host.add(proto)
+ return Connection(self, key, proto, self._loop)
+
+ async def _wait_for_available_connection(
+ self, key: "ConnectionKey", traces: List["Trace"]
+ ) -> None:
+ """Wait for an available connection slot."""
+ # We loop here because there is a race between
+ # the connection limit check and the connection
+ # being acquired. If the connection is acquired
+ # between the check and the await statement, we
+ # need to loop again to check if the connection
+ # slot is still available.
+ attempts = 0
+ while True:
+ fut: asyncio.Future[None] = self._loop.create_future()
+ keyed_waiters = self._waiters[key]
+ keyed_waiters[fut] = None
+ if attempts:
+ # If we have waited before, we need to move the waiter
+ # to the front of the queue as otherwise we might get
+ # starved and hit the timeout.
+ keyed_waiters.move_to_end(fut, last=False)
+
+ try:
+                # Traces happen in the try block to ensure that
+                # the waiter is still cleaned up if an exception is raised.
+ if traces:
+ for trace in traces:
+ await trace.send_connection_queued_start()
+ await fut
+ if traces:
+ for trace in traces:
+ await trace.send_connection_queued_end()
+ finally:
+ # pop the waiter from the queue if its still
+ # there and not already removed by _release_waiter
+ keyed_waiters.pop(fut, None)
+ if not self._waiters.get(key, True):
+ del self._waiters[key]
+
+ if self._available_connections(key) > 0:
+ break
+ attempts += 1
+
+ async def _get(
+ self, key: "ConnectionKey", traces: List["Trace"]
+ ) -> Optional[Connection]:
+ """Get next reusable connection for the key or None.
+
+ The connection will be marked as acquired.
+ """
+ if (conns := self._conns.get(key)) is None:
+ return None
+
+ t1 = monotonic()
+ while conns:
+ proto, t0 = conns.popleft()
+            # We will reuse the connection if it's connected and
+ # the keepalive timeout has not been exceeded
+ if proto.is_connected() and t1 - t0 <= self._keepalive_timeout:
+ if not conns:
+ # The very last connection was reclaimed: drop the key
+ del self._conns[key]
+ self._acquired.add(proto)
+ if self._limit_per_host:
+ self._acquired_per_host[key].add(proto)
+ if traces:
+ for trace in traces:
+ try:
+ await trace.send_connection_reuseconn()
+ except BaseException:
+ self._release_acquired(key, proto)
+ raise
+ return Connection(self, key, proto, self._loop)
+
+ # Connection cannot be reused, close it
+ transport = proto.transport
+ proto.close()
+ # only for SSL transports
+ if not self._cleanup_closed_disabled and key.is_ssl:
+ self._cleanup_closed_transports.append(transport)
+
+ # No more connections: drop the key
+ del self._conns[key]
+ return None
+
+ def _release_waiter(self) -> None:
+ """
+ Iterates over all waiters until one to be released is found.
+
+ The one to be released is not finished and
+ belongs to a host that has available connections.
+ """
+ if not self._waiters:
+ return
+
+        # Shuffling the ordered dict keys avoids iterating
+        # in the same order on each call.
+ queues = list(self._waiters)
+ random.shuffle(queues)
+
+ for key in queues:
+ if self._available_connections(key) < 1:
+ continue
+
+ waiters = self._waiters[key]
+ while waiters:
+ waiter, _ = waiters.popitem(last=False)
+ if not waiter.done():
+ waiter.set_result(None)
+ return
+
+ def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
+ """Release acquired connection."""
+ if self._closed:
+ # acquired connection is already released on connector closing
+ return
+
+ self._acquired.discard(proto)
+ if self._limit_per_host and (conns := self._acquired_per_host.get(key)):
+ conns.discard(proto)
+ if not conns:
+ del self._acquired_per_host[key]
+ self._release_waiter()
+
+ def _release(
+ self,
+ key: "ConnectionKey",
+ protocol: ResponseHandler,
+ *,
+ should_close: bool = False,
+ ) -> None:
+ if self._closed:
+ # acquired connection is already released on connector closing
+ return
+
+ self._release_acquired(key, protocol)
+
+ if self._force_close or should_close or protocol.should_close:
+ transport = protocol.transport
+ protocol.close()
+
+ if key.is_ssl and not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transport)
+ return
+
+ self._conns[key].append((protocol, monotonic()))
+
+ if self._cleanup_handle is None:
+ self._cleanup_handle = helpers.weakref_handle(
+ self,
+ "_cleanup",
+ self._keepalive_timeout,
+ self._loop,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
+ )
+
+ async def _create_connection(
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ raise NotImplementedError()
+
+
+class _DNSCacheTable:
+ def __init__(self, ttl: Optional[float] = None) -> None:
+ self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {}
+ self._timestamps: Dict[Tuple[str, int], float] = {}
+ self._ttl = ttl
+
+ def __contains__(self, host: object) -> bool:
+ return host in self._addrs_rr
+
+ def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None:
+ self._addrs_rr[key] = (cycle(addrs), len(addrs))
+
+ if self._ttl is not None:
+ self._timestamps[key] = monotonic()
+
+ def remove(self, key: Tuple[str, int]) -> None:
+ self._addrs_rr.pop(key, None)
+
+ if self._ttl is not None:
+ self._timestamps.pop(key, None)
+
+ def clear(self) -> None:
+ self._addrs_rr.clear()
+ self._timestamps.clear()
+
+ def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]:
+ loop, length = self._addrs_rr[key]
+ addrs = list(islice(loop, length))
+ # Consume one more element to shift internal state of `cycle`
+ next(loop)
+ return addrs
+
+ def expired(self, key: Tuple[str, int]) -> bool:
+ if self._ttl is None:
+ return False
+
+ return self._timestamps[key] + self._ttl < monotonic()
+
+
+def _make_ssl_context(verified: bool) -> SSLContext:
+ """Create SSL context.
+
+ This method is not async-friendly and should be called from a thread
+ because it will load certificates from disk and do other blocking I/O.
+ """
+ if ssl is None:
+ # No ssl support
+ return None
+ if verified:
+ sslcontext = ssl.create_default_context()
+ else:
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ sslcontext.options |= ssl.OP_NO_SSLv2
+ sslcontext.options |= ssl.OP_NO_SSLv3
+ sslcontext.check_hostname = False
+ sslcontext.verify_mode = ssl.CERT_NONE
+ sslcontext.options |= ssl.OP_NO_COMPRESSION
+ sslcontext.set_default_verify_paths()
+ sslcontext.set_alpn_protocols(("http/1.1",))
+ return sslcontext
+
+
+# The default SSLContext objects are created at import time
+# since they do blocking I/O to load certificates from disk,
+# and imports should always be done before the event loop starts
+# or in a thread.
+_SSL_CONTEXT_VERIFIED = _make_ssl_context(True)
+_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False)
+
+
+class TCPConnector(BaseConnector):
+ """TCP connector.
+
+ verify_ssl - Set to True to check ssl certifications.
+ fingerprint - Pass the binary sha256
+ digest of the expected certificate in DER format to verify
+ that the certificate the server presents matches. See also
+ https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning
+ resolver - Enable DNS lookups and use this
+ resolver
+ use_dns_cache - Use memory cache for DNS lookups.
+ ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
+ family - socket address family
+ local_addr - local tuple of (host, port) to bind socket to
+
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
+ Disabled by default.
+ happy_eyeballs_delay - This is the “Connection Attempt Delay”
+ as defined in RFC 8305. To disable
+ the happy eyeballs algorithm, set to None.
+ interleave - “First Address Family Count” as defined in RFC 8305
+ loop - Optional event loop.
+ socket_factory - A SocketFactoryType function that, if supplied,
+ will be used to create sockets given an
+ AddrInfoType.
+ ssl_shutdown_timeout - DEPRECATED. Will be removed in aiohttp 4.0.
+ Grace period for SSL shutdown handshake on TLS
+ connections. Default is 0 seconds (immediate abort).
+ This parameter allowed for a clean SSL shutdown by
+ notifying the remote peer of connection closure,
+ while avoiding excessive delays during connector cleanup.
+ Note: Only takes effect on Python 3.11+.
+ """
+
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"})
+
+ def __init__(
+ self,
+ *,
+ verify_ssl: bool = True,
+ fingerprint: Optional[bytes] = None,
+ use_dns_cache: bool = True,
+ ttl_dns_cache: Optional[int] = 10,
+ family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC,
+ ssl_context: Optional[SSLContext] = None,
+ ssl: Union[bool, Fingerprint, SSLContext] = True,
+ local_addr: Optional[Tuple[str, int]] = None,
+ resolver: Optional[AbstractResolver] = None,
+ keepalive_timeout: Union[None, float, object] = sentinel,
+ force_close: bool = False,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ enable_cleanup_closed: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ timeout_ceil_threshold: float = 5,
+ happy_eyeballs_delay: Optional[float] = 0.25,
+ interleave: Optional[int] = None,
+ socket_factory: Optional[SocketFactoryType] = None,
+ ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel,
+ ):
+ super().__init__(
+ keepalive_timeout=keepalive_timeout,
+ force_close=force_close,
+ limit=limit,
+ limit_per_host=limit_per_host,
+ enable_cleanup_closed=enable_cleanup_closed,
+ loop=loop,
+ timeout_ceil_threshold=timeout_ceil_threshold,
+ )
+
+ self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+
+ self._resolver: AbstractResolver
+ if resolver is None:
+ self._resolver = DefaultResolver(loop=self._loop)
+ self._resolver_owner = True
+ else:
+ self._resolver = resolver
+ self._resolver_owner = False
+
+ self._use_dns_cache = use_dns_cache
+ self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
+ self._throttle_dns_futures: Dict[
+ Tuple[str, int], Set["asyncio.Future[None]"]
+ ] = {}
+ self._family = family
+ self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr)
+ self._happy_eyeballs_delay = happy_eyeballs_delay
+ self._interleave = interleave
+ self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set()
+ self._socket_factory = socket_factory
+ self._ssl_shutdown_timeout: Optional[float]
+ # Handle ssl_shutdown_timeout with warning for Python < 3.11
+ if ssl_shutdown_timeout is sentinel:
+ self._ssl_shutdown_timeout = 0
+ else:
+ # Deprecation warning for ssl_shutdown_timeout parameter
+ warnings.warn(
+ "The ssl_shutdown_timeout parameter is deprecated and will be removed in aiohttp 4.0",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ if (
+ sys.version_info < (3, 11)
+ and ssl_shutdown_timeout is not None
+ and ssl_shutdown_timeout != 0
+ ):
+ warnings.warn(
+ f"ssl_shutdown_timeout={ssl_shutdown_timeout} is ignored on Python < 3.11; "
+ "only ssl_shutdown_timeout=0 is supported. The timeout will be ignored.",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+ self._ssl_shutdown_timeout = ssl_shutdown_timeout
+
+ def _close(self, *, abort_ssl: bool = False) -> List[Awaitable[object]]:
+ """Close all ongoing DNS calls."""
+ for fut in chain.from_iterable(self._throttle_dns_futures.values()):
+ fut.cancel()
+
+ waiters = super()._close(abort_ssl=abort_ssl)
+
+ for t in self._resolve_host_tasks:
+ t.cancel()
+ waiters.append(t)
+
+ return waiters
+
+ async def close(self, *, abort_ssl: bool = False) -> None:
+ """
+ Close all opened transports.
+
+ :param abort_ssl: If True, SSL connections will be aborted immediately
+ without performing the shutdown handshake. If False (default),
+ the behavior is determined by ssl_shutdown_timeout:
+ - If ssl_shutdown_timeout=0: connections are aborted
+ - If ssl_shutdown_timeout>0: graceful shutdown is performed
+ """
+ if self._resolver_owner:
+ await self._resolver.close()
+ # Use abort_ssl param if explicitly set, otherwise use ssl_shutdown_timeout default
+ await super().close(abort_ssl=abort_ssl or self._ssl_shutdown_timeout == 0)
+
+ @property
+ def family(self) -> int:
+ """Socket family like AF_INET."""
+ return self._family
+
+ @property
+ def use_dns_cache(self) -> bool:
+ """True if local DNS caching is enabled."""
+ return self._use_dns_cache
+
+ def clear_dns_cache(
+ self, host: Optional[str] = None, port: Optional[int] = None
+ ) -> None:
+ """Remove specified host/port or clear all dns local cache."""
+ if host is not None and port is not None:
+ self._cached_hosts.remove((host, port))
+ elif host is not None or port is not None:
+ raise ValueError("either both host and port or none of them are allowed")
+ else:
+ self._cached_hosts.clear()
+
+ async def _resolve_host(
+ self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None
+ ) -> List[ResolveResult]:
+ """Resolve host and return list of addresses."""
+ if is_ip_address(host):
+ return [
+ {
+ "hostname": host,
+ "host": host,
+ "port": port,
+ "family": self._family,
+ "proto": 0,
+ "flags": 0,
+ }
+ ]
+
+ if not self._use_dns_cache:
+
+ if traces:
+ for trace in traces:
+ await trace.send_dns_resolvehost_start(host)
+
+ res = await self._resolver.resolve(host, port, family=self._family)
+
+ if traces:
+ for trace in traces:
+ await trace.send_dns_resolvehost_end(host)
+
+ return res
+
+ key = (host, port)
+ if key in self._cached_hosts and not self._cached_hosts.expired(key):
+ # get result early, before any await (#4014)
+ result = self._cached_hosts.next_addrs(key)
+
+ if traces:
+ for trace in traces:
+ await trace.send_dns_cache_hit(host)
+ return result
+
+ futures: Set["asyncio.Future[None]"]
+ #
+ # If multiple connectors are resolving the same host, we wait
+ # for the first one to resolve and then use the result for all of them.
+ # We use a throttle to ensure that we only resolve the host once
+ # and then use the result for all the waiters.
+ #
+ if key in self._throttle_dns_futures:
+ # get futures early, before any await (#4014)
+ futures = self._throttle_dns_futures[key]
+ future: asyncio.Future[None] = self._loop.create_future()
+ futures.add(future)
+ if traces:
+ for trace in traces:
+ await trace.send_dns_cache_hit(host)
+ try:
+ await future
+ finally:
+ futures.discard(future)
+ return self._cached_hosts.next_addrs(key)
+
+ # update dict early, before any await (#4014)
+ self._throttle_dns_futures[key] = futures = set()
+ # In this case we need to create a task to ensure that we can shield
+ # the task from cancellation as cancelling this lookup should not cancel
+ # the underlying lookup or else the cancel event will get broadcast to
+ # all the waiters across all connections.
+ #
+ coro = self._resolve_host_with_throttle(key, host, port, futures, traces)
+ loop = asyncio.get_running_loop()
+ if sys.version_info >= (3, 12):
+ # Optimization for Python 3.12, try to send immediately
+ resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True)
+ else:
+ resolved_host_task = loop.create_task(coro)
+
+ if not resolved_host_task.done():
+ self._resolve_host_tasks.add(resolved_host_task)
+ resolved_host_task.add_done_callback(self._resolve_host_tasks.discard)
+
+ try:
+ return await asyncio.shield(resolved_host_task)
+ except asyncio.CancelledError:
+
+ def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None:
+ with suppress(Exception, asyncio.CancelledError):
+ fut.result()
+
+ resolved_host_task.add_done_callback(drop_exception)
+ raise
+
+ async def _resolve_host_with_throttle(
+ self,
+ key: Tuple[str, int],
+ host: str,
+ port: int,
+ futures: Set["asyncio.Future[None]"],
+ traces: Optional[Sequence["Trace"]],
+ ) -> List[ResolveResult]:
+ """Resolve host and set result for all waiters.
+
+ This method must be run in a task and shielded from cancellation
+ to avoid cancelling the underlying lookup.
+ """
+ try:
+ if traces:
+ for trace in traces:
+ await trace.send_dns_cache_miss(host)
+
+ for trace in traces:
+ await trace.send_dns_resolvehost_start(host)
+
+ addrs = await self._resolver.resolve(host, port, family=self._family)
+ if traces:
+ for trace in traces:
+ await trace.send_dns_resolvehost_end(host)
+
+ self._cached_hosts.add(key, addrs)
+ for fut in futures:
+ set_result(fut, None)
+ except BaseException as e:
+ # any DNS exception is set for the waiters to raise the same exception.
+ # This coro is always run in task that is shielded from cancellation so
+ # we should never be propagating cancellation here.
+ for fut in futures:
+ set_exception(fut, e)
+ raise
+ finally:
+ self._throttle_dns_futures.pop(key)
+
+ return self._cached_hosts.next_addrs(key)
+
+ async def _create_connection(
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ """Create connection.
+
+ Has same keyword arguments as BaseEventLoop.create_connection.
+ """
+ if req.proxy:
+ _, proto = await self._create_proxy_connection(req, traces, timeout)
+ else:
+ _, proto = await self._create_direct_connection(req, traces, timeout)
+
+ return proto
+
+ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
+ """Logic to get the correct SSL context
+
+ 0. if req.ssl is false, return None
+
+ 1. if ssl_context is specified in req, use it
+ 2. if _ssl_context is specified in self, use it
+ 3. otherwise:
+ 1. if verify_ssl is not specified in req, use self.ssl_context
+ (will generate a default context according to self.verify_ssl)
+ 2. if verify_ssl is True in req, generate a default SSL context
+ 3. if verify_ssl is False in req, generate a SSL context that
+ won't verify
+ """
+ if not req.is_ssl():
+ return None
+
+ if ssl is None: # pragma: no cover
+ raise RuntimeError("SSL is not supported.")
+ sslcontext = req.ssl
+ if isinstance(sslcontext, ssl.SSLContext):
+ return sslcontext
+ if sslcontext is not True:
+ # not verified or fingerprinted
+ return _SSL_CONTEXT_UNVERIFIED
+ sslcontext = self._ssl
+ if isinstance(sslcontext, ssl.SSLContext):
+ return sslcontext
+ if sslcontext is not True:
+ # not verified or fingerprinted
+ return _SSL_CONTEXT_UNVERIFIED
+ return _SSL_CONTEXT_VERIFIED
+
+ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
+ ret = req.ssl
+ if isinstance(ret, Fingerprint):
+ return ret
+ ret = self._ssl
+ if isinstance(ret, Fingerprint):
+ return ret
+ return None
+
+ async def _wrap_create_connection(
+ self,
+ *args: Any,
+ addr_infos: List[AddrInfoType],
+ req: ClientRequest,
+ timeout: "ClientTimeout",
+ client_error: Type[Exception] = ClientConnectorError,
+ **kwargs: Any,
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
+ try:
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
+ sock = await aiohappyeyeballs.start_connection(
+ addr_infos=addr_infos,
+ local_addr_infos=self._local_addr_infos,
+ happy_eyeballs_delay=self._happy_eyeballs_delay,
+ interleave=self._interleave,
+ loop=self._loop,
+ socket_factory=self._socket_factory,
+ )
+ # Add ssl_shutdown_timeout for Python 3.11+ when SSL is used
+ if (
+ kwargs.get("ssl")
+ and self._ssl_shutdown_timeout
+ and sys.version_info >= (3, 11)
+ ):
+ kwargs["ssl_shutdown_timeout"] = self._ssl_shutdown_timeout
+ return await self._loop.create_connection(*args, **kwargs, sock=sock)
+ except cert_errors as exc:
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
+ except ssl_errors as exc:
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
+ except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
+ raise client_error(req.connection_key, exc) from exc
+
+ async def _wrap_existing_connection(
+ self,
+ *args: Any,
+ req: ClientRequest,
+ timeout: "ClientTimeout",
+ client_error: Type[Exception] = ClientConnectorError,
+ **kwargs: Any,
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
+ try:
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
+ return await self._loop.create_connection(*args, **kwargs)
+ except cert_errors as exc:
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
+ except ssl_errors as exc:
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
+ except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
+ raise client_error(req.connection_key, exc) from exc
+
+ def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
+ """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
+
+ It is necessary for TLS-in-TLS so that it is possible to
+ send HTTPS queries through HTTPS proxies.
+
+ This doesn't affect regular HTTP requests, though.
+ """
+ if not req.is_ssl():
+ return
+
+ proxy_url = req.proxy
+ assert proxy_url is not None
+ if proxy_url.scheme != "https":
+ return
+
+ self._check_loop_for_start_tls()
+
+ def _check_loop_for_start_tls(self) -> None:
+ try:
+ self._loop.start_tls
+ except AttributeError as attr_exc:
+ raise RuntimeError(
+ "An HTTPS request is being sent through an HTTPS proxy. "
+ "This needs support for TLS in TLS but it is not implemented "
+ "in your runtime for the stdlib asyncio.\n\n"
+ "Please upgrade to Python 3.11 or higher. For more details, "
+ "please see:\n"
+ "* https://bugs.python.org/issue37179\n"
+ "* https://github.com/python/cpython/pull/28073\n"
+ "* https://docs.aiohttp.org/en/stable/"
+ "client_advanced.html#proxy-support\n"
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+ ) from attr_exc
+
+ def _loop_supports_start_tls(self) -> bool:
+ try:
+ self._check_loop_for_start_tls()
+ except RuntimeError:
+ return False
+ else:
+ return True
+
+ def _warn_about_tls_in_tls(
+ self,
+ underlying_transport: asyncio.Transport,
+ req: ClientRequest,
+ ) -> None:
+ """Issue a warning if the requested URL has HTTPS scheme."""
+ if req.request_info.url.scheme != "https":
+ return
+
+ # Check if uvloop is being used, which supports TLS in TLS,
+ # otherwise assume that asyncio's native transport is being used.
+ if type(underlying_transport).__module__.startswith("uvloop"):
+ return
+
+ # Support in asyncio was added in Python 3.11 (bpo-44011)
+ asyncio_supports_tls_in_tls = sys.version_info >= (3, 11) or getattr(
+ underlying_transport,
+ "_start_tls_compatible",
+ False,
+ )
+
+ if asyncio_supports_tls_in_tls:
+ return
+
+ warnings.warn(
+ "An HTTPS request is being sent through an HTTPS proxy. "
+ "This support for TLS in TLS is known to be disabled "
+ "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
+ "an error in the log below.\n\n"
+ "It is possible to enable it via monkeypatching. "
+ "For more details, see:\n"
+ "* https://bugs.python.org/issue37179\n"
+ "* https://github.com/python/cpython/pull/28073\n\n"
+ "You can temporarily patch this as follows:\n"
+ "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+ RuntimeWarning,
+ source=self,
+ # Why `4`? At least 3 of the calls in the stack originate
+ # from the methods in this class.
+ stacklevel=3,
+ )
+
+ async def _start_tls_connection(
+ self,
+ underlying_transport: asyncio.Transport,
+ req: ClientRequest,
+ timeout: "ClientTimeout",
+ client_error: Type[Exception] = ClientConnectorError,
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+ """Wrap the raw TCP transport with TLS."""
+ tls_proto = self._factory() # Create a brand new proto for TLS
+ sslcontext = self._get_ssl_context(req)
+ if TYPE_CHECKING:
+ # _start_tls_connection is unreachable in the current code path
+ # if sslcontext is None.
+ assert sslcontext is not None
+
+ try:
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
+ try:
+ # ssl_shutdown_timeout is only available in Python 3.11+
+ if sys.version_info >= (3, 11) and self._ssl_shutdown_timeout:
+ tls_transport = await self._loop.start_tls(
+ underlying_transport,
+ tls_proto,
+ sslcontext,
+ server_hostname=req.server_hostname or req.host,
+ ssl_handshake_timeout=timeout.total,
+ ssl_shutdown_timeout=self._ssl_shutdown_timeout,
+ )
+ else:
+ tls_transport = await self._loop.start_tls(
+ underlying_transport,
+ tls_proto,
+ sslcontext,
+ server_hostname=req.server_hostname or req.host,
+ ssl_handshake_timeout=timeout.total,
+ )
+ except BaseException:
+ # We need to close the underlying transport since
+ # `start_tls()` probably failed before it had a
+ # chance to do this:
+ if self._ssl_shutdown_timeout == 0:
+ underlying_transport.abort()
+ else:
+ underlying_transport.close()
+ raise
+ if isinstance(tls_transport, asyncio.Transport):
+ fingerprint = self._get_fingerprint(req)
+ if fingerprint:
+ try:
+ fingerprint.check(tls_transport)
+ except ServerFingerprintMismatch:
+ tls_transport.close()
+ if not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(tls_transport)
+ raise
+ except cert_errors as exc:
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
+ except ssl_errors as exc:
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
+ except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
+ raise client_error(req.connection_key, exc) from exc
+ except TypeError as type_err:
+ # Example cause looks like this:
+ # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
+ # object at 0x7f760615e460> is not supported by start_tls()
+
+ raise ClientConnectionError(
+ "Cannot initialize a TLS-in-TLS connection to host "
+ f"{req.host!s}:{req.port:d} through an underlying connection "
+ f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
+ f"[{type_err!s}]"
+ ) from type_err
+ else:
+ if tls_transport is None:
+ msg = "Failed to start TLS (possibly caused by closing transport)"
+ raise client_error(req.connection_key, OSError(msg))
+ tls_proto.connection_made(
+ tls_transport
+ ) # Kick the state machine of the new TLS protocol
+
+ return tls_transport, tls_proto
+
+ def _convert_hosts_to_addr_infos(
+ self, hosts: List[ResolveResult]
+ ) -> List[AddrInfoType]:
+ """Converts the list of hosts to a list of addr_infos.
+
+ The list of hosts is the result of a DNS lookup. The list of
+ addr_infos is the result of a call to `socket.getaddrinfo()`.
+ """
+ addr_infos: List[AddrInfoType] = []
+ for hinfo in hosts:
+ host = hinfo["host"]
+ is_ipv6 = ":" in host
+ family = socket.AF_INET6 if is_ipv6 else socket.AF_INET
+ if self._family and self._family != family:
+ continue
+ addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"])
+ addr_infos.append(
+ (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)
+ )
+ return addr_infos
+
+ async def _create_direct_connection(
+ self,
+ req: ClientRequest,
+ traces: List["Trace"],
+ timeout: "ClientTimeout",
+ *,
+ client_error: Type[Exception] = ClientConnectorError,
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
+ sslcontext = self._get_ssl_context(req)
+ fingerprint = self._get_fingerprint(req)
+
+ host = req.url.raw_host
+ assert host is not None
+ # Replace multiple trailing dots with a single one.
+ # A trailing dot is only present for fully-qualified domain names.
+ # See https://github.com/aio-libs/aiohttp/pull/7364.
+ if host.endswith(".."):
+ host = host.rstrip(".") + "."
+ port = req.port
+ assert port is not None
+ try:
+ # Cancelling this lookup should not cancel the underlying lookup
+ # or else the cancel event will get broadcast to all the waiters
+ # across all connections.
+ hosts = await self._resolve_host(host, port, traces=traces)
+ except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
+ # in case of proxy it is not ClientProxyConnectionError
+ # it is problem of resolving proxy ip itself
+ raise ClientConnectorDNSError(req.connection_key, exc) from exc
+
+ last_exc: Optional[Exception] = None
+ addr_infos = self._convert_hosts_to_addr_infos(hosts)
+ while addr_infos:
+ # Strip trailing dots, certificates contain FQDN without dots.
+ # See https://github.com/aio-libs/aiohttp/issues/3636
+ server_hostname = (
+ (req.server_hostname or host).rstrip(".") if sslcontext else None
+ )
+
+ try:
+ transp, proto = await self._wrap_create_connection(
+ self._factory,
+ timeout=timeout,
+ ssl=sslcontext,
+ addr_infos=addr_infos,
+ server_hostname=server_hostname,
+ req=req,
+ client_error=client_error,
+ )
+ except (ClientConnectorError, asyncio.TimeoutError) as exc:
+ last_exc = exc
+ aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave)
+ continue
+
+ if req.is_ssl() and fingerprint:
+ try:
+ fingerprint.check(transp)
+ except ServerFingerprintMismatch as exc:
+ transp.close()
+ if not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transp)
+ last_exc = exc
+ # Remove the bad peer from the list of addr_infos
+ sock: socket.socket = transp.get_extra_info("socket")
+ bad_peer = sock.getpeername()
+ aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer)
+ continue
+
+ return transp, proto
+ else:
+ assert last_exc is not None
+ raise last_exc
+
+ async def _create_proxy_connection(
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+ self._fail_on_no_start_tls(req)
+ runtime_has_start_tls = self._loop_supports_start_tls()
+ proxy_req = self._update_proxy_auth_header_and_build_proxy_req(req)
+
+ # create connection to proxy server
+ transport, proto = await self._create_direct_connection(
+ proxy_req, [], timeout, client_error=ClientProxyConnectionError
+ )
+
+ if req.is_ssl():
+ if runtime_has_start_tls:
+ self._warn_about_tls_in_tls(transport, req)
+
+ # For HTTPS requests over HTTP proxy
+ # we must notify proxy to tunnel connection
+ # so we send CONNECT command:
+ # CONNECT www.python.org:443 HTTP/1.1
+ # Host: www.python.org
+ #
+ # next we must do TLS handshake and so on
+ # to do this we must wrap raw socket into secure one
+ # asyncio handles this perfectly
+ proxy_req.method = hdrs.METH_CONNECT
+ proxy_req.url = req.url
+ key = req.connection_key._replace(
+ proxy=None, proxy_auth=None, proxy_headers_hash=None
+ )
+ conn = _ConnectTunnelConnection(self, key, proto, self._loop)
+ proxy_resp = await proxy_req.send(conn)
+ try:
+ protocol = conn._protocol
+ assert protocol is not None
+
+ # read_until_eof=True will ensure the connection isn't closed
+ # once the response is received and processed allowing
+ # START_TLS to work on the connection below.
+ protocol.set_response_params(
+ read_until_eof=runtime_has_start_tls,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
+ )
+ resp = await proxy_resp.start(conn)
+ except BaseException:
+ proxy_resp.close()
+ conn.close()
+ raise
+ else:
+ conn._protocol = None
+ try:
+ if resp.status != 200:
+ message = resp.reason
+ if message is None:
+ message = HTTPStatus(resp.status).phrase
+ raise ClientHttpProxyError(
+ proxy_resp.request_info,
+ resp.history,
+ status=resp.status,
+ message=message,
+ headers=resp.headers,
+ )
+ if not runtime_has_start_tls:
+ rawsock = transport.get_extra_info("socket", default=None)
+ if rawsock is None:
+ raise RuntimeError(
+ "Transport does not expose socket instance"
+ )
+ # Duplicate the socket, so now we can close proxy transport
+ rawsock = rawsock.dup()
+ except BaseException:
+ # It shouldn't be closed in `finally` because it's fed to
+ # `loop.start_tls()` and the docs say not to touch it after
+ # passing there.
+ transport.close()
+ raise
+ finally:
+ if not runtime_has_start_tls:
+ transport.close()
+
+ if not runtime_has_start_tls:
+ # HTTP proxy with support for upgrade to HTTPS
+ sslcontext = self._get_ssl_context(req)
+ return await self._wrap_existing_connection(
+ self._factory,
+ timeout=timeout,
+ ssl=sslcontext,
+ sock=rawsock,
+ server_hostname=req.host,
+ req=req,
+ )
+
+ return await self._start_tls_connection(
+ # Access the old transport for the last time before it's
+ # closed and forgotten forever:
+ transport,
+ req=req,
+ timeout=timeout,
+ )
+ finally:
+ proxy_resp.close()
+
+ return transport, proto
+
+
+class UnixConnector(BaseConnector):
+ """Unix socket connector.
+
+ path - Unix socket path.
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+ loop - Optional event loop.
+ """
+
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"})
+
+ def __init__(
+ self,
+ path: str,
+ force_close: bool = False,
+ keepalive_timeout: Union[object, float, None] = sentinel,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ super().__init__(
+ force_close=force_close,
+ keepalive_timeout=keepalive_timeout,
+ limit=limit,
+ limit_per_host=limit_per_host,
+ loop=loop,
+ )
+ self._path = path
+
+ @property
+ def path(self) -> str:
+ """Path to unix socket."""
+ return self._path
+
+ async def _create_connection(
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ try:
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
+ _, proto = await self._loop.create_unix_connection(
+ self._factory, self._path
+ )
+ except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
+ raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
+
+ return proto
+
+
+class NamedPipeConnector(BaseConnector):
+ """Named pipe connector.
+
+ Only supported by the proactor event loop.
+ See also: https://docs.python.org/3/library/asyncio-eventloop.html
+
+ path - Windows named pipe path.
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+ loop - Optional event loop.
+ """
+
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"})
+
+ def __init__(
+ self,
+ path: str,
+ force_close: bool = False,
+ keepalive_timeout: Union[object, float, None] = sentinel,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ super().__init__(
+ force_close=force_close,
+ keepalive_timeout=keepalive_timeout,
+ limit=limit,
+ limit_per_host=limit_per_host,
+ loop=loop,
+ )
+ if not isinstance(
+ self._loop,
+ asyncio.ProactorEventLoop, # type: ignore[attr-defined]
+ ):
+ raise RuntimeError(
+ "Named Pipes only available in proactor loop under windows"
+ )
+ self._path = path
+
+ @property
+ def path(self) -> str:
+ """Path to the named pipe."""
+ return self._path
+
+ async def _create_connection(
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ try:
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
+ _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined]
+ self._factory, self._path
+ )
+ # the drain is required so that the connection_made is called
+ # and transport is set otherwise it is not set before the
+ # `assert conn.transport is not None`
+ # in client.py's _request method
+ await asyncio.sleep(0)
+ # other option is to manually set transport like
+ # `proto.transport = trans`
+ except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
+ raise ClientConnectorError(req.connection_key, exc) from exc
+
+ return cast(ResponseHandler, proto)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/cookiejar.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/cookiejar.py"
new file mode 100644
index 0000000..193648d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/cookiejar.py"
@@ -0,0 +1,522 @@
+import asyncio
+import calendar
+import contextlib
+import datetime
+import heapq
+import itertools
+import os # noqa
+import pathlib
+import pickle
+import re
+import time
+import warnings
+from collections import defaultdict
+from collections.abc import Mapping
+from http.cookies import BaseCookie, Morsel, SimpleCookie
+from typing import (
+ DefaultDict,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+)
+
+from yarl import URL
+
+from ._cookie_helpers import preserve_morsel_with_coded_value
+from .abc import AbstractCookieJar, ClearCookiePredicate
+from .helpers import is_ip_address
+from .typedefs import LooseCookies, PathLike, StrOrURL
+
+__all__ = ("CookieJar", "DummyCookieJar")
+
+
+CookieItem = Union[str, "Morsel[str]"]
+
+# We cache these string methods here as their use is in performance critical code.
+_FORMAT_PATH = "{}/{}".format
+_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format
+
+# The minimum number of scheduled cookie expirations before we start cleaning up
+# the expiration heap. This is a performance optimization to avoid cleaning up the
+# heap too often when there are only a few scheduled expirations.
+_MIN_SCHEDULED_COOKIE_EXPIRATION = 100
+_SIMPLE_COOKIE = SimpleCookie()
+
+
+class CookieJar(AbstractCookieJar):
+ """Implements cookie storage adhering to RFC 6265."""
+
+ DATE_TOKENS_RE = re.compile(
+ r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
+ r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
+ )
+
+ DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
+
+ DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
+
+ DATE_MONTH_RE = re.compile(
+ "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)",
+ re.I,
+ )
+
+ DATE_YEAR_RE = re.compile(r"(\d{2,4})")
+
+ # calendar.timegm() fails for timestamps after datetime.datetime.max
+ # Minus one as a loss of precision occurs when timestamp() is called.
+ MAX_TIME = (
+ int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
+ )
+ try:
+ calendar.timegm(time.gmtime(MAX_TIME))
+ except (OSError, ValueError):
+ # Hit the maximum representable time on Windows
+ # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
+ # Throws ValueError on PyPy 3.9, OSError elsewhere
+ MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
+ except OverflowError:
+ # #4515: datetime.max may not be representable on 32-bit platforms
+ MAX_TIME = 2**31 - 1
+ # Avoid minuses in the future, 3x faster
+ SUB_MAX_TIME = MAX_TIME - 1
+
+ def __init__(
+ self,
+ *,
+ unsafe: bool = False,
+ quote_cookie: bool = True,
+ treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ super().__init__(loop=loop)
+ self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
+ SimpleCookie
+ )
+ self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = (
+ defaultdict(dict)
+ )
+ self._host_only_cookies: Set[Tuple[str, str]] = set()
+ self._unsafe = unsafe
+ self._quote_cookie = quote_cookie
+ if treat_as_secure_origin is None:
+ treat_as_secure_origin = []
+ elif isinstance(treat_as_secure_origin, URL):
+ treat_as_secure_origin = [treat_as_secure_origin.origin()]
+ elif isinstance(treat_as_secure_origin, str):
+ treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
+ else:
+ treat_as_secure_origin = [
+ URL(url).origin() if isinstance(url, str) else url.origin()
+ for url in treat_as_secure_origin
+ ]
+ self._treat_as_secure_origin = treat_as_secure_origin
+ self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = []
+ self._expirations: Dict[Tuple[str, str, str], float] = {}
+
+ @property
+ def quote_cookie(self) -> bool:
+ return self._quote_cookie
+
+ def save(self, file_path: PathLike) -> None:
+ file_path = pathlib.Path(file_path)
+ with file_path.open(mode="wb") as f:
+ pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
+
+ def load(self, file_path: PathLike) -> None:
+ file_path = pathlib.Path(file_path)
+ with file_path.open(mode="rb") as f:
+ self._cookies = pickle.load(f)
+
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+ if predicate is None:
+ self._expire_heap.clear()
+ self._cookies.clear()
+ self._morsel_cache.clear()
+ self._host_only_cookies.clear()
+ self._expirations.clear()
+ return
+
+ now = time.time()
+ to_del = [
+ key
+ for (domain, path), cookie in self._cookies.items()
+ for name, morsel in cookie.items()
+ if (
+ (key := (domain, path, name)) in self._expirations
+ and self._expirations[key] <= now
+ )
+ or predicate(morsel)
+ ]
+ if to_del:
+ self._delete_cookies(to_del)
+
+ def clear_domain(self, domain: str) -> None:
+ self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
+
+ def __iter__(self) -> "Iterator[Morsel[str]]":
+ self._do_expiration()
+ for val in self._cookies.values():
+ yield from val.values()
+
+ def __len__(self) -> int:
+ """Return number of cookies.
+
+ This function does not iterate self to avoid unnecessary expiration
+ checks.
+ """
+ return sum(len(cookie.values()) for cookie in self._cookies.values())
+
+ def _do_expiration(self) -> None:
+ """Remove expired cookies."""
+ if not (expire_heap_len := len(self._expire_heap)):
+ return
+
+ # If the expiration heap grows larger than the number expirations
+ # times two, we clean it up to avoid keeping expired entries in
+ # the heap and consuming memory. We guard this with a minimum
+ # threshold to avoid cleaning up the heap too often when there are
+ # only a few scheduled expirations.
+ if (
+ expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION
+ and expire_heap_len > len(self._expirations) * 2
+ ):
+ # Remove any expired entries from the expiration heap
+ # that do not match the expiration time in the expirations
+ # as it means the cookie has been re-added to the heap
+ # with a different expiration time.
+ self._expire_heap = [
+ entry
+ for entry in self._expire_heap
+ if self._expirations.get(entry[1]) == entry[0]
+ ]
+ heapq.heapify(self._expire_heap)
+
+ now = time.time()
+ to_del: List[Tuple[str, str, str]] = []
+ # Find any expired cookies and add them to the to-delete list
+ while self._expire_heap:
+ when, cookie_key = self._expire_heap[0]
+ if when > now:
+ break
+ heapq.heappop(self._expire_heap)
+ # Check if the cookie hasn't been re-added to the heap
+ # with a different expiration time as it will be removed
+ # later when it reaches the top of the heap and its
+ # expiration time is met.
+ if self._expirations.get(cookie_key) == when:
+ to_del.append(cookie_key)
+
+ if to_del:
+ self._delete_cookies(to_del)
+
+ def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None:
+ for domain, path, name in to_del:
+ self._host_only_cookies.discard((domain, name))
+ self._cookies[(domain, path)].pop(name, None)
+ self._morsel_cache[(domain, path)].pop(name, None)
+ self._expirations.pop((domain, path, name), None)
+
+ def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
+ cookie_key = (domain, path, name)
+ if self._expirations.get(cookie_key) == when:
+ # Avoid adding duplicates to the heap
+ return
+ heapq.heappush(self._expire_heap, (when, cookie_key))
+ self._expirations[cookie_key] = when
+
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+ """Update cookies."""
+ hostname = response_url.raw_host
+
+ if not self._unsafe and is_ip_address(hostname):
+ # Don't accept cookies from IPs
+ return
+
+ if isinstance(cookies, Mapping):
+ cookies = cookies.items()
+
+ for name, cookie in cookies:
+ if not isinstance(cookie, Morsel):
+ tmp = SimpleCookie()
+ tmp[name] = cookie # type: ignore[assignment]
+ cookie = tmp[name]
+
+ domain = cookie["domain"]
+
+ # ignore domains with trailing dots
+ if domain and domain[-1] == ".":
+ domain = ""
+ del cookie["domain"]
+
+ if not domain and hostname is not None:
+ # Set the cookie's domain to the response hostname
+ # and set its host-only-flag
+ self._host_only_cookies.add((hostname, name))
+ domain = cookie["domain"] = hostname
+
+ if domain and domain[0] == ".":
+ # Remove leading dot
+ domain = domain[1:]
+ cookie["domain"] = domain
+
+ if hostname and not self._is_domain_match(domain, hostname):
+ # Setting cookies for different domains is not allowed
+ continue
+
+ path = cookie["path"]
+ if not path or path[0] != "/":
+ # Set the cookie's path to the response path
+ path = response_url.path
+ if not path.startswith("/"):
+ path = "/"
+ else:
+ # Cut everything from the last slash to the end
+ path = "/" + path[1 : path.rfind("/")]
+ cookie["path"] = path
+ path = path.rstrip("/")
+
+ if max_age := cookie["max-age"]:
+ try:
+ delta_seconds = int(max_age)
+ max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
+ self._expire_cookie(max_age_expiration, domain, path, name)
+ except ValueError:
+ cookie["max-age"] = ""
+
+ elif expires := cookie["expires"]:
+ if expire_time := self._parse_date(expires):
+ self._expire_cookie(expire_time, domain, path, name)
+ else:
+ cookie["expires"] = ""
+
+ key = (domain, path)
+ if self._cookies[key].get(name) != cookie:
+ # Don't blow away the cache if the same
+ # cookie gets set again
+ self._cookies[key][name] = cookie
+ self._morsel_cache[key].pop(name, None)
+
+ self._do_expiration()
+
+ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
+ """Returns this jar's cookies filtered by their attributes."""
+ # We always use BaseCookie now since all
+ # cookies set on on filtered are fully constructed
+ # Morsels, not just names and values.
+ filtered: BaseCookie[str] = BaseCookie()
+ if not self._cookies:
+ # Skip do_expiration() if there are no cookies.
+ return filtered
+ self._do_expiration()
+ if not self._cookies:
+ # Skip rest of function if no non-expired cookies.
+ return filtered
+ if type(request_url) is not URL:
+ warnings.warn(
+ "filter_cookies expects yarl.URL instances only,"
+ f"and will stop working in 4.x, got {type(request_url)}",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ request_url = URL(request_url)
+ hostname = request_url.raw_host or ""
+
+ is_not_secure = request_url.scheme not in ("https", "wss")
+ if is_not_secure and self._treat_as_secure_origin:
+ request_origin = URL()
+ with contextlib.suppress(ValueError):
+ request_origin = request_url.origin()
+ is_not_secure = request_origin not in self._treat_as_secure_origin
+
+ # Send shared cookie
+ key = ("", "")
+ for c in self._cookies[key].values():
+ # Check cache first
+ if c.key in self._morsel_cache[key]:
+ filtered[c.key] = self._morsel_cache[key][c.key]
+ continue
+
+ # Build and cache the morsel
+ mrsl_val = self._build_morsel(c)
+ self._morsel_cache[key][c.key] = mrsl_val
+ filtered[c.key] = mrsl_val
+
+ if is_ip_address(hostname):
+ if not self._unsafe:
+ return filtered
+ domains: Iterable[str] = (hostname,)
+ else:
+ # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com")
+ domains = itertools.accumulate(
+ reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED
+ )
+
+ # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar")
+ paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH)
+ # Create every combination of (domain, path) pairs.
+ pairs = itertools.product(domains, paths)
+
+ path_len = len(request_url.path)
+ # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
+ for p in pairs:
+ if p not in self._cookies:
+ continue
+ for name, cookie in self._cookies[p].items():
+ domain = cookie["domain"]
+
+ if (domain, name) in self._host_only_cookies and domain != hostname:
+ continue
+
+ # Skip edge case when the cookie has a trailing slash but request doesn't.
+ if len(cookie["path"]) > path_len:
+ continue
+
+ if is_not_secure and cookie["secure"]:
+ continue
+
+ # We already built the Morsel so reuse it here
+ if name in self._morsel_cache[p]:
+ filtered[name] = self._morsel_cache[p][name]
+ continue
+
+ # Build and cache the morsel
+ mrsl_val = self._build_morsel(cookie)
+ self._morsel_cache[p][name] = mrsl_val
+ filtered[name] = mrsl_val
+
+ return filtered
+
+ def _build_morsel(self, cookie: Morsel[str]) -> Morsel[str]:
+ """Build a morsel for sending, respecting quote_cookie setting."""
+ if self._quote_cookie and cookie.coded_value and cookie.coded_value[0] == '"':
+ return preserve_morsel_with_coded_value(cookie)
+ morsel: Morsel[str] = Morsel()
+ if self._quote_cookie:
+ value, coded_value = _SIMPLE_COOKIE.value_encode(cookie.value)
+ else:
+ coded_value = value = cookie.value
+ # We use __setstate__ instead of the public set() API because it allows us to
+ # bypass validation and set already validated state. This is more stable than
+ # setting protected attributes directly and unlikely to change since it would
+ # break pickling.
+ morsel.__setstate__({"key": cookie.key, "value": value, "coded_value": coded_value}) # type: ignore[attr-defined]
+ return morsel
+
+ @staticmethod
+ def _is_domain_match(domain: str, hostname: str) -> bool:
+ """Implements domain matching adhering to RFC 6265."""
+ if hostname == domain:
+ return True
+
+ if not hostname.endswith(domain):
+ return False
+
+ non_matching = hostname[: -len(domain)]
+
+ if not non_matching.endswith("."):
+ return False
+
+ return not is_ip_address(hostname)
+
+ @classmethod
+ def _parse_date(cls, date_str: str) -> Optional[int]:
+ """Implements date string parsing adhering to RFC 6265."""
+ if not date_str:
+ return None
+
+ found_time = False
+ found_day = False
+ found_month = False
+ found_year = False
+
+ hour = minute = second = 0
+ day = 0
+ month = 0
+ year = 0
+
+ for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
+
+ token = token_match.group("token")
+
+ if not found_time:
+ time_match = cls.DATE_HMS_TIME_RE.match(token)
+ if time_match:
+ found_time = True
+ hour, minute, second = (int(s) for s in time_match.groups())
+ continue
+
+ if not found_day:
+ day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
+ if day_match:
+ found_day = True
+ day = int(day_match.group())
+ continue
+
+ if not found_month:
+ month_match = cls.DATE_MONTH_RE.match(token)
+ if month_match:
+ found_month = True
+ assert month_match.lastindex is not None
+ month = month_match.lastindex
+ continue
+
+ if not found_year:
+ year_match = cls.DATE_YEAR_RE.match(token)
+ if year_match:
+ found_year = True
+ year = int(year_match.group())
+
+ if 70 <= year <= 99:
+ year += 1900
+ elif 0 <= year <= 69:
+ year += 2000
+
+ if False in (found_day, found_month, found_year, found_time):
+ return None
+
+ if not 1 <= day <= 31:
+ return None
+
+ if year < 1601 or hour > 23 or minute > 59 or second > 59:
+ return None
+
+ return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
+
+
+class DummyCookieJar(AbstractCookieJar):
+ """Implements a dummy cookie storage.
+
+ It can be used with the ClientSession when no cookie processing is needed.
+
+ """
+
+ def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+ super().__init__(loop=loop)
+
+ def __iter__(self) -> "Iterator[Morsel[str]]":
+ while False:
+ yield None
+
+ def __len__(self) -> int:
+ return 0
+
+ @property
+ def quote_cookie(self) -> bool:
+ return True
+
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+ pass
+
+ def clear_domain(self, domain: str) -> None:
+ pass
+
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+ pass
+
+ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
+ return SimpleCookie()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/formdata.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/formdata.py"
new file mode 100644
index 0000000..a5a4f60
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/formdata.py"
@@ -0,0 +1,179 @@
+import io
+import warnings
+from typing import Any, Iterable, List, Optional
+from urllib.parse import urlencode
+
+from multidict import MultiDict, MultiDictProxy
+
+from . import hdrs, multipart, payload
+from .helpers import guess_filename
+from .payload import Payload
+
+__all__ = ("FormData",)
+
+
+class FormData:
+ """Helper class for form body generation.
+
+ Supports multipart/form-data and application/x-www-form-urlencoded.
+ """
+
+ def __init__(
+ self,
+ fields: Iterable[Any] = (),
+ quote_fields: bool = True,
+ charset: Optional[str] = None,
+ *,
+ default_to_multipart: bool = False,
+ ) -> None:
+ self._writer = multipart.MultipartWriter("form-data")
+ self._fields: List[Any] = []
+ self._is_multipart = default_to_multipart
+ self._quote_fields = quote_fields
+ self._charset = charset
+
+ if isinstance(fields, dict):
+ fields = list(fields.items())
+ elif not isinstance(fields, (list, tuple)):
+ fields = (fields,)
+ self.add_fields(*fields)
+
+ @property
+ def is_multipart(self) -> bool:
+ return self._is_multipart
+
+ def add_field(
+ self,
+ name: str,
+ value: Any,
+ *,
+ content_type: Optional[str] = None,
+ filename: Optional[str] = None,
+ content_transfer_encoding: Optional[str] = None,
+ ) -> None:
+
+ if isinstance(value, io.IOBase):
+ self._is_multipart = True
+ elif isinstance(value, (bytes, bytearray, memoryview)):
+ msg = (
+ "In v4, passing bytes will no longer create a file field. "
+ "Please explicitly use the filename parameter or pass a BytesIO object."
+ )
+ if filename is None and content_transfer_encoding is None:
+ warnings.warn(msg, DeprecationWarning)
+ filename = name
+
+ type_options: MultiDict[str] = MultiDict({"name": name})
+ if filename is not None and not isinstance(filename, str):
+ raise TypeError("filename must be an instance of str. Got: %s" % filename)
+ if filename is None and isinstance(value, io.IOBase):
+ filename = guess_filename(value, name)
+ if filename is not None:
+ type_options["filename"] = filename
+ self._is_multipart = True
+
+ headers = {}
+ if content_type is not None:
+ if not isinstance(content_type, str):
+ raise TypeError(
+ "content_type must be an instance of str. Got: %s" % content_type
+ )
+ headers[hdrs.CONTENT_TYPE] = content_type
+ self._is_multipart = True
+ if content_transfer_encoding is not None:
+ if not isinstance(content_transfer_encoding, str):
+ raise TypeError(
+ "content_transfer_encoding must be an instance"
+ " of str. Got: %s" % content_transfer_encoding
+ )
+ msg = (
+ "content_transfer_encoding is deprecated. "
+ "To maintain compatibility with v4 please pass a BytesPayload."
+ )
+ warnings.warn(msg, DeprecationWarning)
+ self._is_multipart = True
+
+ self._fields.append((type_options, headers, value))
+
+ def add_fields(self, *fields: Any) -> None:
+ to_add = list(fields)
+
+ while to_add:
+ rec = to_add.pop(0)
+
+ if isinstance(rec, io.IOBase):
+ k = guess_filename(rec, "unknown")
+ self.add_field(k, rec) # type: ignore[arg-type]
+
+ elif isinstance(rec, (MultiDictProxy, MultiDict)):
+ to_add.extend(rec.items())
+
+ elif isinstance(rec, (list, tuple)) and len(rec) == 2:
+ k, fp = rec
+ self.add_field(k, fp)
+
+ else:
+ raise TypeError(
+ "Only io.IOBase, multidict and (name, file) "
+ "pairs allowed, use .add_field() for passing "
+ "more complex parameters, got {!r}".format(rec)
+ )
+
+ def _gen_form_urlencoded(self) -> payload.BytesPayload:
+ # form data (x-www-form-urlencoded)
+ data = []
+ for type_options, _, value in self._fields:
+ data.append((type_options["name"], value))
+
+ charset = self._charset if self._charset is not None else "utf-8"
+
+ if charset == "utf-8":
+ content_type = "application/x-www-form-urlencoded"
+ else:
+ content_type = "application/x-www-form-urlencoded; charset=%s" % charset
+
+ return payload.BytesPayload(
+ urlencode(data, doseq=True, encoding=charset).encode(),
+ content_type=content_type,
+ )
+
+ def _gen_form_data(self) -> multipart.MultipartWriter:
+ """Encode a list of fields using the multipart/form-data MIME format"""
+ for dispparams, headers, value in self._fields:
+ try:
+ if hdrs.CONTENT_TYPE in headers:
+ part = payload.get_payload(
+ value,
+ content_type=headers[hdrs.CONTENT_TYPE],
+ headers=headers,
+ encoding=self._charset,
+ )
+ else:
+ part = payload.get_payload(
+ value, headers=headers, encoding=self._charset
+ )
+ except Exception as exc:
+ raise TypeError(
+ "Can not serialize value type: %r\n "
+ "headers: %r\n value: %r" % (type(value), headers, value)
+ ) from exc
+
+ if dispparams:
+ part.set_content_disposition(
+ "form-data", quote_fields=self._quote_fields, **dispparams
+ )
+ # FIXME cgi.FieldStorage doesn't likes body parts with
+ # Content-Length which were sent via chunked transfer encoding
+ assert part.headers is not None
+ part.headers.popall(hdrs.CONTENT_LENGTH, None)
+
+ self._writer.append_payload(part)
+
+ self._fields.clear()
+ return self._writer
+
+ def __call__(self) -> Payload:
+ if self._is_multipart:
+ return self._gen_form_data()
+ else:
+ return self._gen_form_urlencoded()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/hdrs.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/hdrs.py"
new file mode 100644
index 0000000..c8d6b35
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/hdrs.py"
@@ -0,0 +1,121 @@
+"""HTTP Headers constants."""
+
+# After changing the file content call ./tools/gen.py
+# to regenerate the headers parser
+import itertools
+from typing import Final, Set
+
+from multidict import istr
+
+METH_ANY: Final[str] = "*"
+METH_CONNECT: Final[str] = "CONNECT"
+METH_HEAD: Final[str] = "HEAD"
+METH_GET: Final[str] = "GET"
+METH_DELETE: Final[str] = "DELETE"
+METH_OPTIONS: Final[str] = "OPTIONS"
+METH_PATCH: Final[str] = "PATCH"
+METH_POST: Final[str] = "POST"
+METH_PUT: Final[str] = "PUT"
+METH_TRACE: Final[str] = "TRACE"
+
+METH_ALL: Final[Set[str]] = {
+ METH_CONNECT,
+ METH_HEAD,
+ METH_GET,
+ METH_DELETE,
+ METH_OPTIONS,
+ METH_PATCH,
+ METH_POST,
+ METH_PUT,
+ METH_TRACE,
+}
+
+ACCEPT: Final[istr] = istr("Accept")
+ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
+ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
+ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
+ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
+ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
+ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
+ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
+ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
+ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
+ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
+ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
+ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
+AGE: Final[istr] = istr("Age")
+ALLOW: Final[istr] = istr("Allow")
+AUTHORIZATION: Final[istr] = istr("Authorization")
+CACHE_CONTROL: Final[istr] = istr("Cache-Control")
+CONNECTION: Final[istr] = istr("Connection")
+CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
+CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
+CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
+CONTENT_LENGTH: Final[istr] = istr("Content-Length")
+CONTENT_LOCATION: Final[istr] = istr("Content-Location")
+CONTENT_MD5: Final[istr] = istr("Content-MD5")
+CONTENT_RANGE: Final[istr] = istr("Content-Range")
+CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
+CONTENT_TYPE: Final[istr] = istr("Content-Type")
+COOKIE: Final[istr] = istr("Cookie")
+DATE: Final[istr] = istr("Date")
+DESTINATION: Final[istr] = istr("Destination")
+DIGEST: Final[istr] = istr("Digest")
+ETAG: Final[istr] = istr("Etag")
+EXPECT: Final[istr] = istr("Expect")
+EXPIRES: Final[istr] = istr("Expires")
+FORWARDED: Final[istr] = istr("Forwarded")
+FROM: Final[istr] = istr("From")
+HOST: Final[istr] = istr("Host")
+IF_MATCH: Final[istr] = istr("If-Match")
+IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
+IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
+IF_RANGE: Final[istr] = istr("If-Range")
+IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
+KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
+LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
+LAST_MODIFIED: Final[istr] = istr("Last-Modified")
+LINK: Final[istr] = istr("Link")
+LOCATION: Final[istr] = istr("Location")
+MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
+ORIGIN: Final[istr] = istr("Origin")
+PRAGMA: Final[istr] = istr("Pragma")
+PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
+PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
+RANGE: Final[istr] = istr("Range")
+REFERER: Final[istr] = istr("Referer")
+RETRY_AFTER: Final[istr] = istr("Retry-After")
+SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
+SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
+SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
+SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
+SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
+SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
+SERVER: Final[istr] = istr("Server")
+SET_COOKIE: Final[istr] = istr("Set-Cookie")
+TE: Final[istr] = istr("TE")
+TRAILER: Final[istr] = istr("Trailer")
+TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
+UPGRADE: Final[istr] = istr("Upgrade")
+URI: Final[istr] = istr("URI")
+USER_AGENT: Final[istr] = istr("User-Agent")
+VARY: Final[istr] = istr("Vary")
+VIA: Final[istr] = istr("Via")
+WANT_DIGEST: Final[istr] = istr("Want-Digest")
+WARNING: Final[istr] = istr("Warning")
+WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
+X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
+X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
+X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
+
+# These are the upper/lower case variants of the headers/methods
+# Example: {'hOst', 'host', 'HoST', 'HOSt', 'hOsT', 'HosT', 'hoSt', ...}
+METH_HEAD_ALL: Final = frozenset(
+ map("".join, itertools.product(*zip(METH_HEAD.upper(), METH_HEAD.lower())))
+)
+METH_CONNECT_ALL: Final = frozenset(
+ map("".join, itertools.product(*zip(METH_CONNECT.upper(), METH_CONNECT.lower())))
+)
+HOST_ALL: Final = frozenset(
+ map("".join, itertools.product(*zip(HOST.upper(), HOST.lower())))
+)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/helpers.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/helpers.py"
new file mode 100644
index 0000000..dfab987
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/helpers.py"
@@ -0,0 +1,986 @@
+"""Various helper functions"""
+
+import asyncio
+import base64
+import binascii
+import contextlib
+import datetime
+import enum
+import functools
+import inspect
+import netrc
+import os
+import platform
+import re
+import sys
+import time
+import weakref
+from collections import namedtuple
+from contextlib import suppress
+from email.message import EmailMessage
+from email.parser import HeaderParser
+from email.policy import HTTP
+from email.utils import parsedate
+from math import ceil
+from pathlib import Path
+from types import MappingProxyType, TracebackType
+from typing import (
+ Any,
+ Callable,
+ ContextManager,
+ Dict,
+ Generator,
+ Generic,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Protocol,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ get_args,
+ overload,
+)
+from urllib.parse import quote
+from urllib.request import getproxies, proxy_bypass
+
+import attr
+from multidict import MultiDict, MultiDictProxy, MultiMapping
+from propcache.api import under_cached_property as reify
+from yarl import URL
+
+from . import hdrs
+from .log import client_logger
+
+if sys.version_info >= (3, 11):
+ import asyncio as async_timeout
+else:
+ import async_timeout
+
+__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify")
+
+IS_MACOS = platform.system() == "Darwin"
+IS_WINDOWS = platform.system() == "Windows"
+
+PY_310 = sys.version_info >= (3, 10)
+PY_311 = sys.version_info >= (3, 11)
+
+
+_T = TypeVar("_T")
+_S = TypeVar("_S")
+
+_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
+sentinel = _SENTINEL.sentinel
+
+NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
+
+# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200)))
+# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
+EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL
+
+DEBUG = sys.flags.dev_mode or (
+ not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
+)
+
+
+CHAR = {chr(i) for i in range(0, 128)}
+CTL = {chr(i) for i in range(0, 32)} | {
+ chr(127),
+}
+SEPARATORS = {
+ "(",
+ ")",
+ "<",
+ ">",
+ "@",
+ ",",
+ ";",
+ ":",
+ "\\",
+ '"',
+ "/",
+ "[",
+ "]",
+ "?",
+ "=",
+ "{",
+ "}",
+ " ",
+ chr(9),
+}
+TOKEN = CHAR ^ CTL ^ SEPARATORS
+
+
+class noop:
+ def __await__(self) -> Generator[None, None, None]:
+ yield
+
+
+class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
+ """Http basic authentication helper."""
+
+ def __new__(
+ cls, login: str, password: str = "", encoding: str = "latin1"
+ ) -> "BasicAuth":
+ if login is None:
+ raise ValueError("None is not allowed as login value")
+
+ if password is None:
+ raise ValueError("None is not allowed as password value")
+
+ if ":" in login:
+ raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
+
+ return super().__new__(cls, login, password, encoding)
+
+ @classmethod
+ def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
+ """Create a BasicAuth object from an Authorization HTTP header."""
+ try:
+ auth_type, encoded_credentials = auth_header.split(" ", 1)
+ except ValueError:
+ raise ValueError("Could not parse authorization header.")
+
+ if auth_type.lower() != "basic":
+ raise ValueError("Unknown authorization method %s" % auth_type)
+
+ try:
+ decoded = base64.b64decode(
+ encoded_credentials.encode("ascii"), validate=True
+ ).decode(encoding)
+ except binascii.Error:
+ raise ValueError("Invalid base64 encoding.")
+
+ try:
+ # RFC 2617 HTTP Authentication
+ # https://www.ietf.org/rfc/rfc2617.txt
+ # the colon must be present, but the username and password may be
+ # otherwise blank.
+ username, password = decoded.split(":", 1)
+ except ValueError:
+ raise ValueError("Invalid credentials.")
+
+ return cls(username, password, encoding=encoding)
+
+ @classmethod
+ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
+ """Create BasicAuth from url."""
+ if not isinstance(url, URL):
+ raise TypeError("url should be yarl.URL instance")
+ # Check raw_user and raw_password first as yarl is likely
+ # to already have these values parsed from the netloc in the cache.
+ if url.raw_user is None and url.raw_password is None:
+ return None
+ return cls(url.user or "", url.password or "", encoding=encoding)
+
+ def encode(self) -> str:
+ """Encode credentials."""
+ creds = (f"{self.login}:{self.password}").encode(self.encoding)
+ return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
+
+
+def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
+ """Remove user and password from URL if present and return BasicAuth object."""
+ # Check raw_user and raw_password first as yarl is likely
+ # to already have these values parsed from the netloc in the cache.
+ if url.raw_user is None and url.raw_password is None:
+ return url, None
+ return url.with_user(None), BasicAuth(url.user or "", url.password or "")
+
+
+def netrc_from_env() -> Optional[netrc.netrc]:
+ """Load netrc from file.
+
+ Attempt to load it from the path specified by the env-var
+ NETRC or in the default location in the user's home directory.
+
+ Returns None if it couldn't be found or fails to parse.
+ """
+ netrc_env = os.environ.get("NETRC")
+
+ if netrc_env is not None:
+ netrc_path = Path(netrc_env)
+ else:
+ try:
+ home_dir = Path.home()
+ except RuntimeError as e: # pragma: no cover
+ # if pathlib can't resolve home, it may raise a RuntimeError
+ client_logger.debug(
+ "Could not resolve home directory when "
+ "trying to look for .netrc file: %s",
+ e,
+ )
+ return None
+
+ netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")
+
+ try:
+ return netrc.netrc(str(netrc_path))
+ except netrc.NetrcParseError as e:
+ client_logger.warning("Could not parse .netrc file: %s", e)
+ except OSError as e:
+ netrc_exists = False
+ with contextlib.suppress(OSError):
+ netrc_exists = netrc_path.is_file()
+ # we couldn't read the file (doesn't exist, permissions, etc.)
+ if netrc_env or netrc_exists:
+ # only warn if the environment wanted us to load it,
+ # or it appears like the default file does actually exist
+ client_logger.warning("Could not read .netrc file: %s", e)
+
+ return None
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ProxyInfo:
+ proxy: URL
+ proxy_auth: Optional[BasicAuth]
+
+
+def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
+ """
+ Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.
+
+ :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
+ entry is found for the ``host``.
+ """
+ if netrc_obj is None:
+ raise LookupError("No .netrc file found")
+ auth_from_netrc = netrc_obj.authenticators(host)
+
+ if auth_from_netrc is None:
+ raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
+ login, account, password = auth_from_netrc
+
+ # TODO(PY311): username = login or account
+ # Up to python 3.10, account could be None if not specified,
+ # and login will be empty string if not specified. From 3.11,
+ # login and account will be empty string if not specified.
+ username = login if (login or account is None) else account
+
+ # TODO(PY311): Remove this, as password will be empty string
+ # if not specified
+ if password is None:
+ password = ""
+
+ return BasicAuth(username, password)
+
+
+def proxies_from_env() -> Dict[str, ProxyInfo]:
+ proxy_urls = {
+ k: URL(v)
+ for k, v in getproxies().items()
+ if k in ("http", "https", "ws", "wss")
+ }
+ netrc_obj = netrc_from_env()
+ stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
+ ret = {}
+ for proto, val in stripped.items():
+ proxy, auth = val
+ if proxy.scheme in ("https", "wss"):
+ client_logger.warning(
+ "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
+ )
+ continue
+ if netrc_obj and auth is None:
+ if proxy.host is not None:
+ try:
+ auth = basicauth_from_netrc(netrc_obj, proxy.host)
+ except LookupError:
+ auth = None
+ ret[proto] = ProxyInfo(proxy, auth)
+ return ret
+
+
+def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
+ """Get a permitted proxy for the given URL from the env."""
+ if url.host is not None and proxy_bypass(url.host):
+ raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
+
+ proxies_in_env = proxies_from_env()
+ try:
+ proxy_info = proxies_in_env[url.scheme]
+ except KeyError:
+ raise LookupError(f"No proxies found for `{url!s}` in the env")
+ else:
+ return proxy_info.proxy, proxy_info.proxy_auth
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class MimeType:
+ type: str
+ subtype: str
+ suffix: str
+ parameters: "MultiDictProxy[str]"
+
+
+@functools.lru_cache(maxsize=56)
+def parse_mimetype(mimetype: str) -> MimeType:
+ """Parses a MIME type into its components.
+
+ mimetype is a MIME type string.
+
+ Returns a MimeType object.
+
+ Example:
+
+ >>> parse_mimetype('text/html; charset=utf-8')
+ MimeType(type='text', subtype='html', suffix='',
+ parameters={'charset': 'utf-8'})
+
+ """
+ if not mimetype:
+ return MimeType(
+ type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
+ )
+
+ parts = mimetype.split(";")
+ params: MultiDict[str] = MultiDict()
+ for item in parts[1:]:
+ if not item:
+ continue
+ key, _, value = item.partition("=")
+ params.add(key.lower().strip(), value.strip(' "'))
+
+ fulltype = parts[0].strip().lower()
+ if fulltype == "*":
+ fulltype = "*/*"
+
+ mtype, _, stype = fulltype.partition("/")
+ stype, _, suffix = stype.partition("+")
+
+ return MimeType(
+ type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
+ )
+
+
+class EnsureOctetStream(EmailMessage):
+ def __init__(self) -> None:
+ super().__init__()
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
+ self.set_default_type("application/octet-stream")
+
+ def get_content_type(self) -> str:
+ """Re-implementation from Message
+
+    Returns application/octet-stream in place of text/plain when
+ value is wrong.
+
+ The way this class is used guarantees that content-type will
+        be present, so simplify the checks relative to the base implementation.
+ """
+ value = self.get("content-type", "").lower()
+
+ # Based on the implementation of _splitparam in the standard library
+ ctype, _, _ = value.partition(";")
+ ctype = ctype.strip()
+ if ctype.count("/") != 1:
+ return self.get_default_type()
+ return ctype
+
+
+@functools.lru_cache(maxsize=56)
+def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]:
+ """Parse Content-Type header.
+
+ Returns a tuple of the parsed content type and a
+ MappingProxyType of parameters. The default returned value
+ is `application/octet-stream`
+ """
+ msg = HeaderParser(EnsureOctetStream, policy=HTTP).parsestr(f"Content-Type: {raw}")
+ content_type = msg.get_content_type()
+ params = msg.get_params(())
+ content_dict = dict(params[1:]) # First element is content type again
+ return content_type, MappingProxyType(content_dict)
+
+
+def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
+ name = getattr(obj, "name", None)
+ if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
+ return Path(name).name
+ return default
+
+
+not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
+QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
+
+
+def quoted_string(content: str) -> str:
+ """Return 7-bit content as quoted-string.
+
+ Format content into a quoted-string as defined in RFC5322 for
+ Internet Message Format. Notice that this is not the 8-bit HTTP
+    format, but the 7-bit email format. Content must be in US-ASCII or
+ a ValueError is raised.
+ """
+ if not (QCONTENT > set(content)):
+ raise ValueError(f"bad content for quoted-string {content!r}")
+ return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
+
+
+def content_disposition_header(
+ disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
+) -> str:
+ """Sets ``Content-Disposition`` header for MIME.
+
+ This is the MIME payload Content-Disposition header from RFC 2183
+    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
+ RFC 6266.
+
+ disptype is a disposition type: inline, attachment, form-data.
+ Should be valid extension token (see RFC 2183)
+
+ quote_fields performs value quoting to 7-bit MIME headers
+    according to RFC 7578. Set quote_fields to False if the recipient
+ can take 8-bit file names and field values.
+
+ _charset specifies the charset to use when quote_fields is True.
+
+ params is a dict with disposition params.
+ """
+ if not disptype or not (TOKEN > set(disptype)):
+ raise ValueError(f"bad content disposition type {disptype!r}")
+
+ value = disptype
+ if params:
+ lparams = []
+ for key, val in params.items():
+ if not key or not (TOKEN > set(key)):
+ raise ValueError(f"bad content disposition parameter {key!r}={val!r}")
+ if quote_fields:
+ if key.lower() == "filename":
+ qval = quote(val, "", encoding=_charset)
+ lparams.append((key, '"%s"' % qval))
+ else:
+ try:
+ qval = quoted_string(val)
+ except ValueError:
+ qval = "".join(
+ (_charset, "''", quote(val, "", encoding=_charset))
+ )
+ lparams.append((key + "*", qval))
+ else:
+ lparams.append((key, '"%s"' % qval))
+ else:
+ qval = val.replace("\\", "\\\\").replace('"', '\\"')
+ lparams.append((key, '"%s"' % qval))
+ sparams = "; ".join("=".join(pair) for pair in lparams)
+ value = "; ".join((value, sparams))
+ return value
+
+
+def is_ip_address(host: Optional[str]) -> bool:
+ """Check if host looks like an IP Address.
+
+ This check is only meant as a heuristic to ensure that
+ a host is not a domain name.
+ """
+ if not host:
+ return False
+ # For a host to be an ipv4 address, it must be all numeric.
+ # The host must contain a colon to be an IPv6 address.
+ return ":" in host or host.replace(".", "").isdigit()
+
+
+_cached_current_datetime: Optional[int] = None
+_cached_formatted_datetime = ""
+
+
+def rfc822_formatted_time() -> str:
+ global _cached_current_datetime
+ global _cached_formatted_datetime
+
+ now = int(time.time())
+ if now != _cached_current_datetime:
+ # Weekday and month names for HTTP date/time formatting;
+ # always English!
+ # Tuples are constants stored in codeobject!
+ _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
+ _monthname = (
+ "", # Dummy so we can use 1-based month numbers
+ "Jan",
+ "Feb",
+ "Mar",
+ "Apr",
+ "May",
+ "Jun",
+ "Jul",
+ "Aug",
+ "Sep",
+ "Oct",
+ "Nov",
+ "Dec",
+ )
+
+ year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
+ _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
+ _weekdayname[wd],
+ day,
+ _monthname[month],
+ year,
+ hh,
+ mm,
+ ss,
+ )
+ _cached_current_datetime = now
+ return _cached_formatted_datetime
+
+
+def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
+ ref, name = info
+ ob = ref()
+ if ob is not None:
+ with suppress(Exception):
+ getattr(ob, name)()
+
+
+def weakref_handle(
+ ob: object,
+ name: str,
+ timeout: float,
+ loop: asyncio.AbstractEventLoop,
+ timeout_ceil_threshold: float = 5,
+) -> Optional[asyncio.TimerHandle]:
+ if timeout is not None and timeout > 0:
+ when = loop.time() + timeout
+ if timeout >= timeout_ceil_threshold:
+ when = ceil(when)
+
+ return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
+ return None
+
+
+def call_later(
+ cb: Callable[[], Any],
+ timeout: float,
+ loop: asyncio.AbstractEventLoop,
+ timeout_ceil_threshold: float = 5,
+) -> Optional[asyncio.TimerHandle]:
+ if timeout is None or timeout <= 0:
+ return None
+ now = loop.time()
+ when = calculate_timeout_when(now, timeout, timeout_ceil_threshold)
+ return loop.call_at(when, cb)
+
+
+def calculate_timeout_when(
+ loop_time: float,
+ timeout: float,
+ timeout_ceiling_threshold: float,
+) -> float:
+ """Calculate when to execute a timeout."""
+ when = loop_time + timeout
+ if timeout > timeout_ceiling_threshold:
+ return ceil(when)
+ return when
+
+
+class TimeoutHandle:
+ """Timeout handle"""
+
+ __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks")
+
+ def __init__(
+ self,
+ loop: asyncio.AbstractEventLoop,
+ timeout: Optional[float],
+ ceil_threshold: float = 5,
+ ) -> None:
+ self._timeout = timeout
+ self._loop = loop
+ self._ceil_threshold = ceil_threshold
+ self._callbacks: List[
+ Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
+ ] = []
+
+ def register(
+ self, callback: Callable[..., None], *args: Any, **kwargs: Any
+ ) -> None:
+ self._callbacks.append((callback, args, kwargs))
+
+ def close(self) -> None:
+ self._callbacks.clear()
+
+ def start(self) -> Optional[asyncio.TimerHandle]:
+ timeout = self._timeout
+ if timeout is not None and timeout > 0:
+ when = self._loop.time() + timeout
+ if timeout >= self._ceil_threshold:
+ when = ceil(when)
+ return self._loop.call_at(when, self.__call__)
+ else:
+ return None
+
+ def timer(self) -> "BaseTimerContext":
+ if self._timeout is not None and self._timeout > 0:
+ timer = TimerContext(self._loop)
+ self.register(timer.timeout)
+ return timer
+ else:
+ return TimerNoop()
+
+ def __call__(self) -> None:
+ for cb, args, kwargs in self._callbacks:
+ with suppress(Exception):
+ cb(*args, **kwargs)
+
+ self._callbacks.clear()
+
+
+class BaseTimerContext(ContextManager["BaseTimerContext"]):
+
+ __slots__ = ()
+
+ def assert_timeout(self) -> None:
+ """Raise TimeoutError if timeout has been exceeded."""
+
+
+class TimerNoop(BaseTimerContext):
+
+ __slots__ = ()
+
+ def __enter__(self) -> BaseTimerContext:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ return
+
+
+class TimerContext(BaseTimerContext):
+ """Low resolution timeout context manager"""
+
+ __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling")
+
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._tasks: List[asyncio.Task[Any]] = []
+ self._cancelled = False
+ self._cancelling = 0
+
+ def assert_timeout(self) -> None:
+ """Raise TimeoutError if timer has already been cancelled."""
+ if self._cancelled:
+ raise asyncio.TimeoutError from None
+
+ def __enter__(self) -> BaseTimerContext:
+ task = asyncio.current_task(loop=self._loop)
+ if task is None:
+ raise RuntimeError("Timeout context manager should be used inside a task")
+
+ if sys.version_info >= (3, 11):
+ # Remember if the task was already cancelling
+ # so when we __exit__ we can decide if we should
+ # raise asyncio.TimeoutError or let the cancellation propagate
+ self._cancelling = task.cancelling()
+
+ if self._cancelled:
+ raise asyncio.TimeoutError from None
+
+ self._tasks.append(task)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> Optional[bool]:
+ enter_task: Optional[asyncio.Task[Any]] = None
+ if self._tasks:
+ enter_task = self._tasks.pop()
+
+ if exc_type is asyncio.CancelledError and self._cancelled:
+ assert enter_task is not None
+ # The timeout was hit, and the task was cancelled
+ # so we need to uncancel the last task that entered the context manager
+ # since the cancellation should not leak out of the context manager
+ if sys.version_info >= (3, 11):
+ # If the task was already cancelling don't raise
+ # asyncio.TimeoutError and instead return None
+ # to allow the cancellation to propagate
+ if enter_task.uncancel() > self._cancelling:
+ return None
+ raise asyncio.TimeoutError from exc_val
+ return None
+
+ def timeout(self) -> None:
+ if not self._cancelled:
+ for task in set(self._tasks):
+ task.cancel()
+
+ self._cancelled = True
+
+
+def ceil_timeout(
+ delay: Optional[float], ceil_threshold: float = 5
+) -> async_timeout.Timeout:
+ if delay is None or delay <= 0:
+ return async_timeout.timeout(None)
+
+ loop = asyncio.get_running_loop()
+ now = loop.time()
+ when = now + delay
+ if delay > ceil_threshold:
+ when = ceil(when)
+ return async_timeout.timeout_at(when)
+
+
+class HeadersMixin:
+ """Mixin for handling headers."""
+
+ ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
+
+ _headers: MultiMapping[str]
+ _content_type: Optional[str] = None
+ _content_dict: Optional[Dict[str, str]] = None
+ _stored_content_type: Union[str, None, _SENTINEL] = sentinel
+
+ def _parse_content_type(self, raw: Optional[str]) -> None:
+ self._stored_content_type = raw
+ if raw is None:
+ # default value according to RFC 2616
+ self._content_type = "application/octet-stream"
+ self._content_dict = {}
+ else:
+ content_type, content_mapping_proxy = parse_content_type(raw)
+ self._content_type = content_type
+ # _content_dict needs to be mutable so we can update it
+ self._content_dict = content_mapping_proxy.copy()
+
+ @property
+ def content_type(self) -> str:
+ """The value of content part for Content-Type HTTP header."""
+ raw = self._headers.get(hdrs.CONTENT_TYPE)
+ if self._stored_content_type != raw:
+ self._parse_content_type(raw)
+ assert self._content_type is not None
+ return self._content_type
+
+ @property
+ def charset(self) -> Optional[str]:
+ """The value of charset part for Content-Type HTTP header."""
+ raw = self._headers.get(hdrs.CONTENT_TYPE)
+ if self._stored_content_type != raw:
+ self._parse_content_type(raw)
+ assert self._content_dict is not None
+ return self._content_dict.get("charset")
+
+ @property
+ def content_length(self) -> Optional[int]:
+ """The value of Content-Length HTTP header."""
+ content_length = self._headers.get(hdrs.CONTENT_LENGTH)
+ return None if content_length is None else int(content_length)
+
+
+def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
+ if not fut.done():
+ fut.set_result(result)
+
+
+_EXC_SENTINEL = BaseException()
+
+
+class ErrorableProtocol(Protocol):
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: BaseException = ...,
+ ) -> None: ... # pragma: no cover
+
+
+def set_exception(
+ fut: "asyncio.Future[_T] | ErrorableProtocol",
+ exc: BaseException,
+ exc_cause: BaseException = _EXC_SENTINEL,
+) -> None:
+ """Set future exception.
+
+ If the future is marked as complete, this function is a no-op.
+
+ :param exc_cause: An exception that is a direct cause of ``exc``.
+ Only set if provided.
+ """
+ if asyncio.isfuture(fut) and fut.done():
+ return
+
+ exc_is_sentinel = exc_cause is _EXC_SENTINEL
+ exc_causes_itself = exc is exc_cause
+ if not exc_is_sentinel and not exc_causes_itself:
+ exc.__cause__ = exc_cause
+
+ fut.set_exception(exc)
+
+
+@functools.total_ordering
+class AppKey(Generic[_T]):
+ """Keys for static typing support in Application."""
+
+ __slots__ = ("_name", "_t", "__orig_class__")
+
+ # This may be set by Python when instantiating with a generic type. We need to
+ # support this, in order to support types that are not concrete classes,
+ # like Iterable, which can't be passed as the second parameter to __init__.
+ __orig_class__: Type[object]
+
+ def __init__(self, name: str, t: Optional[Type[_T]] = None):
+ # Prefix with module name to help deduplicate key names.
+ frame = inspect.currentframe()
+ while frame:
+ if frame.f_code.co_name == "<module>":
+ module: str = frame.f_globals["__name__"]
+ break
+ frame = frame.f_back
+
+ self._name = module + "." + name
+ self._t = t
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, AppKey):
+ return self._name < other._name
+ return True # Order AppKey above other types.
+
+ def __repr__(self) -> str:
+ t = self._t
+ if t is None:
+ with suppress(AttributeError):
+ # Set to type arg.
+ t = get_args(self.__orig_class__)[0]
+
+ if t is None:
+ t_repr = "<<Unknown>>"
+ elif isinstance(t, type):
+ if t.__module__ == "builtins":
+ t_repr = t.__qualname__
+ else:
+ t_repr = f"{t.__module__}.{t.__qualname__}"
+ else:
+ t_repr = repr(t)
+ return f"<AppKey({self._name}, type={t_repr})>"
+
+
+class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
+ __slots__ = ("_maps",)
+
+ def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
+ self._maps = tuple(maps)
+
+ def __init_subclass__(cls) -> None:
+ raise TypeError(
+ "Inheritance class {} from ChainMapProxy "
+ "is forbidden".format(cls.__name__)
+ )
+
+ @overload # type: ignore[override]
+ def __getitem__(self, key: AppKey[_T]) -> _T: ...
+
+ @overload
+ def __getitem__(self, key: str) -> Any: ...
+
+ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
+ for mapping in self._maps:
+ try:
+ return mapping[key]
+ except KeyError:
+ pass
+ raise KeyError(key)
+
+ @overload # type: ignore[override]
+ def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ...
+
+ @overload
+ def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...
+
+ @overload
+ def get(self, key: str, default: Any = ...) -> Any: ...
+
+ def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __len__(self) -> int:
+ # reuses stored hash values if possible
+ return len(set().union(*self._maps))
+
+ def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
+ d: Dict[Union[str, AppKey[Any]], Any] = {}
+ for mapping in reversed(self._maps):
+ # reuses stored hash values if possible
+ d.update(mapping)
+ return iter(d)
+
+ def __contains__(self, key: object) -> bool:
+ return any(key in m for m in self._maps)
+
+ def __bool__(self) -> bool:
+ return any(self._maps)
+
+ def __repr__(self) -> str:
+ content = ", ".join(map(repr, self._maps))
+ return f"ChainMapProxy({content})"
+
+
+# https://tools.ietf.org/html/rfc7232#section-2.3
+_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
+_ETAGC_RE = re.compile(_ETAGC)
+_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
+QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
+LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
+
+ETAG_ANY = "*"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ETag:
+ value: str
+ is_weak: bool = False
+
+
+def validate_etag_value(value: str) -> None:
+ if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
+ raise ValueError(
+ f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
+ )
+
+
+def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
+ """Process a date string, return a datetime object"""
+ if date_str is not None:
+ timetuple = parsedate(date_str)
+ if timetuple is not None:
+ with suppress(ValueError):
+ return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
+ return None
+
+
+@functools.lru_cache
+def must_be_empty_body(method: str, code: int) -> bool:
+ """Check if a request must return an empty body."""
+ return (
+ code in EMPTY_BODY_STATUS_CODES
+ or method in EMPTY_BODY_METHODS
+ or (200 <= code < 300 and method in hdrs.METH_CONNECT_ALL)
+ )
+
+
+def should_remove_content_length(method: str, code: int) -> bool:
+ """Check if a Content-Length header should be removed.
+
+ This should always be a subset of must_be_empty_body
+ """
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
+ return code in EMPTY_BODY_STATUS_CODES or (
+ 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
+ )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http.py"
new file mode 100644
index 0000000..a1feae2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http.py"
@@ -0,0 +1,72 @@
+import sys
+from http import HTTPStatus
+from typing import Mapping, Tuple
+
+from . import __version__
+from .http_exceptions import HttpProcessingError as HttpProcessingError
+from .http_parser import (
+ HeadersParser as HeadersParser,
+ HttpParser as HttpParser,
+ HttpRequestParser as HttpRequestParser,
+ HttpResponseParser as HttpResponseParser,
+ RawRequestMessage as RawRequestMessage,
+ RawResponseMessage as RawResponseMessage,
+)
+from .http_websocket import (
+ WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
+ WS_KEY as WS_KEY,
+ WebSocketError as WebSocketError,
+ WebSocketReader as WebSocketReader,
+ WebSocketWriter as WebSocketWriter,
+ WSCloseCode as WSCloseCode,
+ WSMessage as WSMessage,
+ WSMsgType as WSMsgType,
+ ws_ext_gen as ws_ext_gen,
+ ws_ext_parse as ws_ext_parse,
+)
+from .http_writer import (
+ HttpVersion as HttpVersion,
+ HttpVersion10 as HttpVersion10,
+ HttpVersion11 as HttpVersion11,
+ StreamWriter as StreamWriter,
+)
+
+__all__ = (
+ "HttpProcessingError",
+ "RESPONSES",
+ "SERVER_SOFTWARE",
+ # .http_writer
+ "StreamWriter",
+ "HttpVersion",
+ "HttpVersion10",
+ "HttpVersion11",
+ # .http_parser
+ "HeadersParser",
+ "HttpParser",
+ "HttpRequestParser",
+ "HttpResponseParser",
+ "RawRequestMessage",
+ "RawResponseMessage",
+ # .http_websocket
+ "WS_CLOSED_MESSAGE",
+ "WS_CLOSING_MESSAGE",
+ "WS_KEY",
+ "WebSocketReader",
+ "WebSocketWriter",
+ "ws_ext_gen",
+ "ws_ext_parse",
+ "WSMessage",
+ "WebSocketError",
+ "WSMsgType",
+ "WSCloseCode",
+)
+
+
+SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
+ sys.version_info, __version__
+)
+
+RESPONSES: Mapping[int, Tuple[str, str]] = {
+ v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
+}
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_exceptions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_exceptions.py"
new file mode 100644
index 0000000..0b5867c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_exceptions.py"
@@ -0,0 +1,116 @@
+"""Low-level http related exceptions."""
+
+from textwrap import indent
+from typing import Optional, Union
+
+from .typedefs import _CIMultiDict
+
+__all__ = ("HttpProcessingError",)
+
+
+class HttpProcessingError(Exception):
+ """HTTP error.
+
+ Shortcut for raising HTTP errors with custom code, message and headers.
+
+ code: HTTP Error code.
+ message: (optional) Error message.
+ headers: (optional) Headers to be sent in response, a list of pairs
+ """
+
+ code = 0
+ message = ""
+ headers = None
+
+ def __init__(
+ self,
+ *,
+ code: Optional[int] = None,
+ message: str = "",
+ headers: Optional[_CIMultiDict] = None,
+ ) -> None:
+ if code is not None:
+ self.code = code
+ self.headers = headers
+ self.message = message
+
+ def __str__(self) -> str:
+ msg = indent(self.message, " ")
+ return f"{self.code}, message:\n{msg}"
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
+
+
+class BadHttpMessage(HttpProcessingError):
+
+ code = 400
+ message = "Bad Request"
+
+ def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
+ super().__init__(message=message, headers=headers)
+ self.args = (message,)
+
+
+class HttpBadRequest(BadHttpMessage):
+
+ code = 400
+ message = "Bad Request"
+
+
+class PayloadEncodingError(BadHttpMessage):
+ """Base class for payload errors"""
+
+
+class ContentEncodingError(PayloadEncodingError):
+ """Content encoding error."""
+
+
+class TransferEncodingError(PayloadEncodingError):
+ """transfer encoding error."""
+
+
+class ContentLengthError(PayloadEncodingError):
+ """Not enough data to satisfy content length header."""
+
+
+class DecompressSizeError(PayloadEncodingError):
+ """Decompressed size exceeds the configured limit."""
+
+
+class LineTooLong(BadHttpMessage):
+ def __init__(
+ self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
+ ) -> None:
+ super().__init__(
+ f"Got more than {limit} bytes ({actual_size}) when reading {line}."
+ )
+ self.args = (line, limit, actual_size)
+
+
+class InvalidHeader(BadHttpMessage):
+ def __init__(self, hdr: Union[bytes, str]) -> None:
+ hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
+ super().__init__(f"Invalid HTTP header: {hdr!r}")
+ self.hdr = hdr_s
+ self.args = (hdr,)
+
+
+class BadStatusLine(BadHttpMessage):
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
+ if not isinstance(line, str):
+ line = repr(line)
+ super().__init__(error or f"Bad status line {line!r}")
+ self.args = (line,)
+ self.line = line
+
+
+class BadHttpMethod(BadStatusLine):
+ """Invalid HTTP method in status line."""
+
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
+ super().__init__(line, error or f"Bad HTTP method in status line {line!r}")
+
+
+class InvalidURLError(BadHttpMessage):
+ pass
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_parser.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_parser.py"
new file mode 100644
index 0000000..393e76a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_parser.py"
@@ -0,0 +1,1086 @@
+import abc
+import asyncio
+import re
+import string
+from contextlib import suppress
+from enum import IntEnum
+from typing import (
+ Any,
+ ClassVar,
+ Final,
+ Generic,
+ List,
+ Literal,
+ NamedTuple,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, istr
+from yarl import URL
+
+from . import hdrs
+from .base_protocol import BaseProtocol
+from .compression_utils import (
+ DEFAULT_MAX_DECOMPRESS_SIZE,
+ HAS_BROTLI,
+ HAS_ZSTD,
+ BrotliDecompressor,
+ ZLibDecompressor,
+ ZSTDDecompressor,
+)
+from .helpers import (
+ _EXC_SENTINEL,
+ DEBUG,
+ EMPTY_BODY_METHODS,
+ EMPTY_BODY_STATUS_CODES,
+ NO_EXTENSIONS,
+ BaseTimerContext,
+ set_exception,
+)
+from .http_exceptions import (
+ BadHttpMessage,
+ BadHttpMethod,
+ BadStatusLine,
+ ContentEncodingError,
+ ContentLengthError,
+ DecompressSizeError,
+ InvalidHeader,
+ InvalidURLError,
+ LineTooLong,
+ TransferEncodingError,
+)
+from .http_writer import HttpVersion, HttpVersion10
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .typedefs import RawHeaders
+
__all__ = (
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
)

# Line separator accepted by feed_data(): strict CRLF, or bare LF in lax mode.
_SEP = Literal[b"\r\n", b"\n"]

ASCIISET: Final[Set[str]] = set(string.printable)

# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
# RFC 9110 token (used for methods and header names).
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
# "HTTP/<major>.<minor>", single ASCII digits only.
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
# Unsigned decimal integer (Content-Length, status codes).
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
# Hexadecimal chunk-size field of chunked transfer encoding.
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
+
+
class RawRequestMessage(NamedTuple):
    """Parsed HTTP request line and headers (body is delivered separately)."""

    method: str
    path: str  # request target exactly as received
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    raw_headers: RawHeaders  # (name, value) byte pairs
    should_close: bool  # connection must be closed after this message
    compression: Optional[str]  # Content-Encoding if recognized, else None
    upgrade: bool  # Connection: upgrade requested
    chunked: bool  # Transfer-Encoding: chunked
    url: URL  # path parsed into a yarl.URL
+
+
class RawResponseMessage(NamedTuple):
    """Parsed HTTP status line and headers (body is delivered separately)."""

    version: HttpVersion
    code: int  # 3-digit status code
    reason: str  # reason phrase, possibly empty
    headers: CIMultiDictProxy[str]
    raw_headers: RawHeaders  # (name, value) byte pairs
    should_close: bool
    compression: Optional[str]  # Content-Encoding if recognized, else None
    upgrade: bool
    chunked: bool
+
+
# Message type produced by a concrete parser subclass (request or response).
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
+
+
class ParseState(IntEnum):
    """How the payload length of the current message is determined."""

    PARSE_NONE = 0  # no body expected
    PARSE_LENGTH = 1  # fixed Content-Length
    PARSE_CHUNKED = 2  # chunked transfer encoding
    PARSE_UNTIL_EOF = 3  # read until the connection closes
+
+
class ChunkState(IntEnum):
    """Sub-states of the chunked transfer-encoding state machine."""

    PARSE_CHUNKED_SIZE = 0  # expecting "<hex-size>[;ext]CRLF"
    PARSE_CHUNKED_CHUNK = 1  # reading chunk data bytes
    PARSE_CHUNKED_CHUNK_EOF = 2  # expecting CRLF terminating a chunk
    PARSE_MAYBE_TRAILERS = 3  # after 0-size chunk: blank line or trailers
    PARSE_TRAILERS = 4  # reading trailer header lines
+
+
class HeadersParser:
    """Parser for an RFC 9112 header section given as a list of raw lines.

    Enforces field-size limits, validates header names as RFC 9110 tokens,
    and (in lax mode only) supports deprecated obs-fold continuation lines.
    """

    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lax: bool = False,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        # lax enables obs-fold continuation-line handling below.
        self._lax = lax

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        """Parse ``lines`` (terminated by an empty line) into headers.

        Returns a read-only case-insensitive multidict of decoded headers
        plus the raw ``(name, value)`` byte pairs.  Raises ``InvalidHeader``
        or ``LineTooLong`` on malformed or oversized input.
        """
        headers: CIMultiDict[str] = CIMultiDict()
        # note: "raw" does not mean inclusion of OWS before/after the field value
        raw_headers = []

        lines_idx = 0
        line = lines[lines_idx]
        line_count = len(lines)

        # The empty terminator line ends the loop (empty bytes are falsy).
        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            if len(bname) == 0:
                raise InvalidHeader(bname)

            # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
            if {bname[0], bname[-1]} & {32, 9}:  # {" ", "\t"}
                raise InvalidHeader(line)

            bvalue = bvalue.lstrip(b" \t")
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "backslashreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )
            name = bname.decode("utf-8", "surrogateescape")
            if not TOKENRE.fullmatch(name):
                raise InvalidHeader(bname)

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = self._lax and line and line[0] in (32, 9)  # (' ', '\t')

            # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "backslashreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "backslashreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip(b" \t")
            value = bvalue.decode("utf-8", "surrogateescape")

            # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
            if "\n" in value or "\r" in value or "\x00" in value:
                raise InvalidHeader(bvalue)

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
+
+
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
    """Return True if the Upgrade header names a protocol we can switch to."""
    upgrade = headers.get(hdrs.UPGRADE, "")
    if not upgrade.isascii():
        # .lower() can transform non-ascii characters, so reject them first.
        return False
    return upgrade.lower() in ("tcp", "websocket")
+
+
class HttpParser(abc.ABC, Generic[_MsgT]):
    """Incremental HTTP/1.x message parser (shared request/response core).

    ``feed_data`` consumes raw socket bytes, splits out message heads
    (start line + headers) and hands payload bytes to an
    ``HttpPayloadParser``.  Subclasses implement ``parse_message`` for the
    concrete start-line format.
    """

    # Lax parsing (bare-LF line endings, obs-fold) — overridden by the
    # response parser; requests are always parsed strictly.
    lax: ClassVar[bool] = False

    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2**16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        # Accumulated head lines of the message currently being parsed.
        self._lines: List[bytes] = []
        # Bytes received after the last complete line (awaiting more data).
        self._tail = b""
        self._upgraded = False
        self._payload = None
        self._payload_parser: Optional[HttpPayloadParser] = None
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(
            max_line_size, max_headers, max_field_size, self.lax
        )

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT: ...

    @abc.abstractmethod
    def _is_chunked_te(self, te: str) -> bool: ...

    def feed_eof(self) -> Optional[_MsgT]:
        """Handle end of stream; may return a partially received message."""
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                # NOTE(review): bytes-vs-str comparison — always True since
                # self._lines holds bytes; looks like it was meant to test
                # for the blank terminator line. Confirm against upstream.
                if self._lines[-1] != "\r\n":
                    self._lines.append(b"")
                with suppress(Exception):
                    return self.parse_message(self._lines)
        return None

    def feed_data(
        self,
        data: bytes,
        SEP: _SEP = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
        """Feed raw bytes into the parser.

        Returns ``(messages, upgraded, tail)`` where ``messages`` is a list
        of ``(message, payload_stream)`` pairs completed by this call,
        ``upgraded`` signals a protocol upgrade, and ``tail`` is unconsumed
        data (non-empty only after an upgrade).
        """
        messages = []

        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        should_close = False
        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + len(SEP)
                    continue

                if pos >= start_pos:
                    if should_close:
                        raise BadHttpMessage("Data after `Connection: close`")

                    # line found
                    line = data[start_pos:pos]
                    if SEP == b"\n":  # For lax response parsing
                        line = line.rstrip(b"\r")
                    self._lines.append(line)
                    start_pos = pos + len(SEP)

                    # \r\n\r\n found
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            # Always reset head state, even on parse error.
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            # Shouldn't allow +/- or other number formats.
                            # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
                            # msg.headers is already stripped of leading/trailing wsp
                            if not DIGITS.fullmatch(length_hdr):
                                raise InvalidHeader(CONTENT_LENGTH)

                            return int(length_hdr)

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade and _is_supported_upgrade(
                            msg.headers
                        )

                        method = getattr(msg, "method", self.method)
                        # code is only present on responses
                        code = getattr(msg, "code", 0)

                        assert self.protocol is not None
                        # calculate payload
                        empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
                            method and method in EMPTY_BODY_METHODS
                        )
                        if not empty_body and (
                            ((length is not None and length > 0) or msg.chunked)
                            and not self._upgraded
                        ):
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                                headers_parser=self._headers_parser,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            # CONNECT tunnels the connection: everything after
                            # the head is opaque payload.
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                                headers_parser=self._headers_parser,
                            )
                        elif not empty_body and length is None and self.read_until_eof:
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                                headers_parser=self._headers_parser,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        else:
                            payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                        should_close = msg.should_close
                else:
                    # No full line yet — stash remaining bytes for next call.
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
                except BaseException as underlying_exc:
                    reraised_exc = underlying_exc
                    if self.payload_exception is not None:
                        reraised_exc = self.payload_exception(str(underlying_exc))

                    set_exception(
                        self._payload_parser.payload,
                        reraised_exc,
                        underlying_exc,
                    )

                    eof = True
                    data = b""
                    if isinstance(
                        underlying_exc, (InvalidHeader, TransferEncodingError)
                    ):
                        raise

                if eof:
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None
        encoding = None
        upgrade = False
        chunked = False

        # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
        # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
        singletons = (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_LOCATION,
            hdrs.CONTENT_RANGE,
            hdrs.CONTENT_TYPE,
            hdrs.ETAG,
            hdrs.HOST,
            hdrs.MAX_FORWARDS,
            hdrs.SERVER,
            hdrs.TRANSFER_ENCODING,
            hdrs.USER_AGENT,
        )
        bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
        if bad_hdr is not None:
            raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
            elif v == "upgrade" and headers.get(hdrs.UPGRADE):
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING, "")
        if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}:
            encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if self._is_chunked_te(te):
                chunked = True

            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Transfer-Encoding can't be present with Content-Length",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val
+
+
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        """Parse the request line plus header lines into a RawRequestMessage."""
        # request line
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(" ", maxsplit=2)
        except ValueError:
            raise BadHttpMethod(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method must be an RFC 9110 token
        if not TOKENRE.fullmatch(method):
            raise BadHttpMethod(method)

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        elif path == "*" and method == "OPTIONS":
            # asterisk-form,
            url = URL(path, encoded=True)
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)
            if url.scheme == "":
                # not absolute-form
                raise InvalidURLError(
                    path.encode(errors="surrogateescape").decode("latin1")
                )

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines[1:])

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP 1.0 must asks to not close
                close = True
            else:  # HTTP 1.1 must ask to close.
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )

    def _is_chunked_te(self, te: str) -> bool:
        """Return True if the final Transfer-Encoding coding is "chunked".

        Any other final coding is rejected as invalid for requests.
        """
        te = te.rsplit(",", maxsplit=1)[-1].strip(" \t")
        # .lower() transforms some non-ascii chars, so must check first.
        if te.isascii() and te.lower() == "chunked":
            return True
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
        raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
+
+
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    # Lax mode should only be enabled on response parser.
    lax = not DEBUG

    def feed_data(
        self,
        data: bytes,
        SEP: Optional[_SEP] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
        # Default to bare-LF splitting in lax (non-DEBUG) mode; stray \r is
        # stripped per line by the base class.
        if SEP is None:
            SEP = b"\r\n" if DEBUG else b"\n"
        return super().feed_data(data, SEP, *args, **kwargs)

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        """Parse the status line plus header lines into a RawResponseMessage."""
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(maxsplit=1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(maxsplit=1)
        except ValueError:
            # Reason phrase is optional.
            status = status.strip()
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit ASCII number, no padding
        if len(status) != 3 or not DIGITS.fullmatch(status):
            raise BadStatusLine(line)
        status_i = int(status)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines[1:])

        if close is None:
            if version_o <= HttpVersion10:
                close = True
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
            elif 100 <= status_i < 200 or status_i in {204, 304}:
                close = False
            elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
                close = False
            else:
                # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
                close = True

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )

    def _is_chunked_te(self, te: str) -> bool:
        """Return True if the final Transfer-Encoding coding is "chunked"."""
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
        return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"
+
+
class HttpPayloadParser:
    """Incremental parser for a single message body.

    Feeds decoded payload bytes into ``payload`` (a StreamReader, possibly
    wrapped in a DeflateBuffer for decompression), handling fixed-length,
    chunked, and read-until-EOF bodies.
    """

    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        response_with_body: bool = True,
        auto_decompress: bool = True,
        lax: bool = False,
        *,
        headers_parser: HeadersParser,
    ) -> None:
        # Remaining byte count for fixed-length bodies.
        self._length = 0
        self._type = ParseState.PARSE_UNTIL_EOF
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0
        # Unconsumed bytes carried over between feed_data calls.
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        self._lax = lax
        # Reused for parsing trailer fields (same grammar as headers).
        self._headers_parser = headers_parser
        self._trailer_lines: list[bytes] = []
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                payload, compression
            )
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True
        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        """Handle end of stream; raises if the body is known to be truncated."""
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data to satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data to satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Consume body bytes.

        Returns ``(eof, leftover)``: ``eof`` is True when the body is
        complete, ``leftover`` holds bytes belonging to the next message.
        """
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                            # Verify no LF in the chunk-extension
                            if b"\n" in (ext := chunk[i:pos]):
                                exc = TransferEncodingError(
                                    f"Unexpected LF in chunk-extension: {ext!r}"
                                )
                                set_exception(self.payload, exc)
                                raise exc
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        if not re.fullmatch(HEXDIGITS, size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if self._lax and chunk.startswith(b"\r"):
                        chunk = chunk[1:]
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos < 0:  # No line found
                        self._chunk_tail = chunk
                        return False, b""

                    line = chunk[:pos]
                    chunk = chunk[pos + len(SEP) :]
                    if SEP == b"\n":  # For lax response parsing
                        line = line.rstrip(b"\r")
                    self._trailer_lines.append(line)

                    # \r\n\r\n found, end of stream
                    if self._trailer_lines[-1] == b"":
                        # Headers and trailers are defined the same way,
                        # so we reuse the HeadersParser here.
                        # (Parsed trailers are validated but then discarded.)
                        try:
                            trailers, raw_trailers = self._headers_parser.parse_headers(
                                self._trailer_lines
                            )
                        finally:
                            self._trailer_lines.clear()
                        self.payload.feed_eof()
                        return True, chunk

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
+
+
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    decompressor: Any

    def __init__(
        self,
        out: StreamReader,
        encoding: Optional[str],
        max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
    ) -> None:
        self.out = out
        # Running count of compressed bytes received so far.
        self.size = 0
        out.total_compressed_bytes = self.size
        self.encoding = encoding
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor, ZSTDDecompressor]
        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )
            self.decompressor = BrotliDecompressor()
        elif encoding == "zstd":
            if not HAS_ZSTD:
                raise ContentEncodingError(
                    "Can not decode content-encoding: zstandard (zstd). "
                    "Please install `backports.zstd`"
                )
            self.decompressor = ZSTDDecompressor()
        else:
            # gzip / deflate (and None) are handled by zlib.
            self.decompressor = ZLibDecompressor(encoding=encoding)

        self._max_decompress_size = max_decompress_size

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Propagate an exception to the wrapped output stream."""
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress ``chunk`` and feed the result to the output stream."""
        if not size:
            return

        self.size += size
        self.out.total_compressed_bytes = self.size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            # Decompress with limit + 1 so we can detect if output exceeds limit
            chunk = self.decompressor.decompress_sync(
                chunk, max_length=self._max_decompress_size + 1
            )
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        # Check if decompression limit was exceeded
        if len(chunk) > self._max_decompress_size:
            raise DecompressSizeError(
                "Decompressed data exceeds the configured limit of %d bytes"
                % self._max_decompress_size
            )

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and close the output stream."""
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            # A deflate stream that never reached its end marker is truncated.
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
+
+
# Keep the pure-Python implementations addressable under explicit names ...
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

# ... then, unless extensions are disabled, shadow the public names with the
# C-accelerated versions when the compiled module is available.
try:
    if not NO_EXTENSIONS:
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    pass
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_websocket.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_websocket.py"
new file mode 100644
index 0000000..6b4b30e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_websocket.py"
@@ -0,0 +1,36 @@
+"""WebSocket protocol versions 13 and 8."""
+
+from ._websocket.helpers import WS_KEY, ws_ext_gen, ws_ext_parse
+from ._websocket.models import (
+ WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE,
+ WebSocketError,
+ WSCloseCode,
+ WSHandshakeError,
+ WSMessage,
+ WSMsgType,
+)
+from ._websocket.reader import WebSocketReader
+from ._websocket.writer import WebSocketWriter
+
# Messages that the WebSocketResponse.receive needs to handle internally
# (control frames / close bookkeeping) rather than hand to the caller.
_INTERNAL_RECEIVE_TYPES = frozenset(
    (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.PING, WSMsgType.PONG)
)
+
+
# Public re-exports of the websocket implementation in ._websocket.
# Fix: "WSMessage" was listed twice, which is redundant for star-imports
# and misleading for readers auditing the public API.
__all__ = (
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSHandshakeError",
)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_writer.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_writer.py"
new file mode 100644
index 0000000..a140b21
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/http_writer.py"
@@ -0,0 +1,378 @@
+"""Http related parsers and protocol."""
+
+import asyncio
+import sys
+from typing import ( # noqa
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable,
+ List,
+ NamedTuple,
+ Optional,
+ Union,
+)
+
+from multidict import CIMultiDict
+
+from .abc import AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .client_exceptions import ClientConnectionResetError
+from .compression_utils import ZLibCompressor
+from .helpers import NO_EXTENSIONS
+
+__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
+
+
+MIN_PAYLOAD_FOR_WRITELINES = 2048
+IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2)
+IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9)
+SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9
+# writelines is not safe for use
+# on Python 3.12+ until 3.12.9
+# on Python 3.13+ until 3.13.2
+# and on older versions it not any faster than write
+# CVE-2024-12254: https://github.com/python/cpython/pull/127656
+
+
+class HttpVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+HttpVersion10 = HttpVersion(1, 0)
+HttpVersion11 = HttpVersion(1, 1)
+
+
+_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
+_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
+
+
+class StreamWriter(AbstractStreamWriter):
+
+ length: Optional[int] = None
+ chunked: bool = False
+ _eof: bool = False
+ _compress: Optional[ZLibCompressor] = None
+
+ def __init__(
+ self,
+ protocol: BaseProtocol,
+ loop: asyncio.AbstractEventLoop,
+ on_chunk_sent: _T_OnChunkSent = None,
+ on_headers_sent: _T_OnHeadersSent = None,
+ ) -> None:
+ self._protocol = protocol
+ self.loop = loop
+ self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
+ self._on_headers_sent: _T_OnHeadersSent = on_headers_sent
+ self._headers_buf: Optional[bytes] = None
+ self._headers_written: bool = False
+
+ @property
+ def transport(self) -> Optional[asyncio.Transport]:
+ return self._protocol.transport
+
+ @property
+ def protocol(self) -> BaseProtocol:
+ return self._protocol
+
+ def enable_chunking(self) -> None:
+ self.chunked = True
+
+ def enable_compression(
+ self, encoding: str = "deflate", strategy: Optional[int] = None
+ ) -> None:
+ self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)
+
+ def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
+ size = len(chunk)
+ self.buffer_size += size
+ self.output_size += size
+ transport = self._protocol.transport
+ if transport is None or transport.is_closing():
+ raise ClientConnectionResetError("Cannot write to closing transport")
+ transport.write(chunk)
+
+ def _writelines(self, chunks: Iterable[bytes]) -> None:
+ size = 0
+ for chunk in chunks:
+ size += len(chunk)
+ self.buffer_size += size
+ self.output_size += size
+ transport = self._protocol.transport
+ if transport is None or transport.is_closing():
+ raise ClientConnectionResetError("Cannot write to closing transport")
+ if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES:
+ transport.write(b"".join(chunks))
+ else:
+ transport.writelines(chunks)
+
+ def _write_chunked_payload(
+ self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
+ ) -> None:
+ """Write a chunk with proper chunked encoding."""
+ chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
+ self._writelines((chunk_len_pre, chunk, b"\r\n"))
+
+ def _send_headers_with_payload(
+ self,
+ chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"],
+ is_eof: bool,
+ ) -> None:
+ """Send buffered headers with payload, coalescing into single write."""
+ # Mark headers as written
+ self._headers_written = True
+ headers_buf = self._headers_buf
+ self._headers_buf = None
+
+ if TYPE_CHECKING:
+ # Safe because callers (write() and write_eof()) only invoke this method
+ # after checking that self._headers_buf is truthy
+ assert headers_buf is not None
+
+ if not self.chunked:
+ # Non-chunked: coalesce headers with body
+ if chunk:
+ self._writelines((headers_buf, chunk))
+ else:
+ self._write(headers_buf)
+ return
+
+ # Coalesce headers with chunked data
+ if chunk:
+ chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
+ if is_eof:
+ self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n0\r\n\r\n"))
+ else:
+ self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n"))
+ elif is_eof:
+ self._writelines((headers_buf, b"0\r\n\r\n"))
+ else:
+ self._write(headers_buf)
+
+ async def write(
+ self,
+ chunk: Union[bytes, bytearray, memoryview],
+ *,
+ drain: bool = True,
+ LIMIT: int = 0x10000,
+ ) -> None:
+ """
+ Writes chunk of data to a stream.
+
+ write_eof() indicates end of stream.
+ writer can't be used after write_eof() method being called.
+ write() return drain future.
+ """
+ if self._on_chunk_sent is not None:
+ await self._on_chunk_sent(chunk)
+
+ if isinstance(chunk, memoryview):
+ if chunk.nbytes != len(chunk):
+ # just reshape it
+ chunk = chunk.cast("c")
+
+ if self._compress is not None:
+ chunk = await self._compress.compress(chunk)
+ if not chunk:
+ return
+
+ if self.length is not None:
+ chunk_len = len(chunk)
+ if self.length >= chunk_len:
+ self.length = self.length - chunk_len
+ else:
+ chunk = chunk[: self.length]
+ self.length = 0
+ if not chunk:
+ return
+
+ # Handle buffered headers for small payload optimization
+ if self._headers_buf and not self._headers_written:
+ self._send_headers_with_payload(chunk, False)
+ if drain and self.buffer_size > LIMIT:
+ self.buffer_size = 0
+ await self.drain()
+ return
+
+ if chunk:
+ if self.chunked:
+ self._write_chunked_payload(chunk)
+ else:
+ self._write(chunk)
+
+ if drain and self.buffer_size > LIMIT:
+ self.buffer_size = 0
+ await self.drain()
+
+ async def write_headers(
+ self, status_line: str, headers: "CIMultiDict[str]"
+ ) -> None:
+ """Write headers to the stream."""
+ if self._on_headers_sent is not None:
+ await self._on_headers_sent(headers)
+ # status + headers
+ buf = _serialize_headers(status_line, headers)
+ self._headers_written = False
+ self._headers_buf = buf
+
+ def send_headers(self) -> None:
+ """Force sending buffered headers if not already sent."""
+ if not self._headers_buf or self._headers_written:
+ return
+
+ self._headers_written = True
+ headers_buf = self._headers_buf
+ self._headers_buf = None
+
+ if TYPE_CHECKING:
+ # Safe because we only enter this block when self._headers_buf is truthy
+ assert headers_buf is not None
+
+ self._write(headers_buf)
+
+ def set_eof(self) -> None:
+ """Indicate that the message is complete."""
+ if self._eof:
+ return
+
+ # If headers haven't been sent yet, send them now
+ # This handles the case where there's no body at all
+ if self._headers_buf and not self._headers_written:
+ self._headers_written = True
+ headers_buf = self._headers_buf
+ self._headers_buf = None
+
+ if TYPE_CHECKING:
+ # Safe because we only enter this block when self._headers_buf is truthy
+ assert headers_buf is not None
+
+ # Combine headers and chunked EOF marker in a single write
+ if self.chunked:
+ self._writelines((headers_buf, b"0\r\n\r\n"))
+ else:
+ self._write(headers_buf)
+ elif self.chunked and self._headers_written:
+ # Headers already sent, just send the final chunk marker
+ self._write(b"0\r\n\r\n")
+
+ self._eof = True
+
+ async def write_eof(self, chunk: bytes = b"") -> None:
+ if self._eof:
+ return
+
+ if chunk and self._on_chunk_sent is not None:
+ await self._on_chunk_sent(chunk)
+
+ # Handle body/compression
+ if self._compress:
+ chunks: List[bytes] = []
+ chunks_len = 0
+ if chunk and (compressed_chunk := await self._compress.compress(chunk)):
+ chunks_len = len(compressed_chunk)
+ chunks.append(compressed_chunk)
+
+ flush_chunk = self._compress.flush()
+ chunks_len += len(flush_chunk)
+ chunks.append(flush_chunk)
+ assert chunks_len
+
+ # Send buffered headers with compressed data if not yet sent
+ if self._headers_buf and not self._headers_written:
+ self._headers_written = True
+ headers_buf = self._headers_buf
+ self._headers_buf = None
+
+ if self.chunked:
+ # Coalesce headers with compressed chunked data
+ chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
+ self._writelines(
+ (headers_buf, chunk_len_pre, *chunks, b"\r\n0\r\n\r\n")
+ )
+ else:
+ # Coalesce headers with compressed data
+ self._writelines((headers_buf, *chunks))
+ await self.drain()
+ self._eof = True
+ return
+
+ # Headers already sent, just write compressed data
+ if self.chunked:
+ chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
+ self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n"))
+ elif len(chunks) > 1:
+ self._writelines(chunks)
+ else:
+ self._write(chunks[0])
+ await self.drain()
+ self._eof = True
+ return
+
+ # No compression - send buffered headers if not yet sent
+ if self._headers_buf and not self._headers_written:
+ # Use helper to send headers with payload
+ self._send_headers_with_payload(chunk, True)
+ await self.drain()
+ self._eof = True
+ return
+
+ # Handle remaining body
+ if self.chunked:
+ if chunk:
+ # Write final chunk with EOF marker
+ self._writelines(
+ (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n0\r\n\r\n")
+ )
+ else:
+ self._write(b"0\r\n\r\n")
+ await self.drain()
+ self._eof = True
+ return
+
+ if chunk:
+ self._write(chunk)
+ await self.drain()
+
+ self._eof = True
+
+ async def drain(self) -> None:
+ """Flush the write buffer.
+
+ The intended use is to write
+
+ await w.write(data)
+ await w.drain()
+ """
+ protocol = self._protocol
+ if protocol.transport is not None and protocol._paused:
+ await protocol._drain_helper()
+
+
+def _safe_header(string: str) -> str:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return detected in headers. "
+ "Potential header injection attack."
+ )
+ return string
+
+
+def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
+ headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
+ line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
+ return line.encode("utf-8")
+
+
+_serialize_headers = _py_serialize_headers
+
+try:
+ import aiohttp._http_writer as _http_writer # type: ignore[import-not-found]
+
+ _c_serialize_headers = _http_writer._serialize_headers
+ if not NO_EXTENSIONS:
+ _serialize_headers = _c_serialize_headers
+except ImportError:
+ pass
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/log.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/log.py"
new file mode 100644
index 0000000..3cecea2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/log.py"
@@ -0,0 +1,8 @@
+import logging
+
+access_logger = logging.getLogger("aiohttp.access")
+client_logger = logging.getLogger("aiohttp.client")
+internal_logger = logging.getLogger("aiohttp.internal")
+server_logger = logging.getLogger("aiohttp.server")
+web_logger = logging.getLogger("aiohttp.web")
+ws_logger = logging.getLogger("aiohttp.websocket")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/multipart.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/multipart.py"
new file mode 100644
index 0000000..9c37f0b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/multipart.py"
@@ -0,0 +1,1152 @@
+import base64
+import binascii
+import json
+import re
+import sys
+import uuid
+import warnings
+from collections import deque
+from collections.abc import Mapping, Sequence
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Deque,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+from urllib.parse import parse_qsl, unquote, urlencode
+
+from multidict import CIMultiDict, CIMultiDictProxy
+
+from .abc import AbstractStreamWriter
+from .compression_utils import (
+ DEFAULT_MAX_DECOMPRESS_SIZE,
+ ZLibCompressor,
+ ZLibDecompressor,
+)
+from .hdrs import (
+ CONTENT_DISPOSITION,
+ CONTENT_ENCODING,
+ CONTENT_LENGTH,
+ CONTENT_TRANSFER_ENCODING,
+ CONTENT_TYPE,
+)
+from .helpers import CHAR, TOKEN, parse_mimetype, reify
+from .http import HeadersParser
+from .log import internal_logger
+from .payload import (
+ JsonPayload,
+ LookupError,
+ Order,
+ Payload,
+ StringPayload,
+ get_payload,
+ payload_type,
+)
+from .streams import StreamReader
+
+if sys.version_info >= (3, 11):
+ from typing import Self
+else:
+ from typing import TypeVar
+
+ Self = TypeVar("Self", bound="BodyPartReader")
+
+__all__ = (
+ "MultipartReader",
+ "MultipartWriter",
+ "BodyPartReader",
+ "BadContentDispositionHeader",
+ "BadContentDispositionParam",
+ "parse_content_disposition",
+ "content_disposition_filename",
+)
+
+
+if TYPE_CHECKING:
+ from .client_reqrep import ClientResponse
+
+
+class BadContentDispositionHeader(RuntimeWarning):
+ pass
+
+
+class BadContentDispositionParam(RuntimeWarning):
+ pass
+
+
+def parse_content_disposition(
+ header: Optional[str],
+) -> Tuple[Optional[str], Dict[str, str]]:
+ def is_token(string: str) -> bool:
+ return bool(string) and TOKEN >= set(string)
+
+ def is_quoted(string: str) -> bool:
+ return string[0] == string[-1] == '"'
+
+ def is_rfc5987(string: str) -> bool:
+ return is_token(string) and string.count("'") == 2
+
+ def is_extended_param(string: str) -> bool:
+ return string.endswith("*")
+
+ def is_continuous_param(string: str) -> bool:
+ pos = string.find("*") + 1
+ if not pos:
+ return False
+ substring = string[pos:-1] if string.endswith("*") else string[pos:]
+ return substring.isdigit()
+
+ def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
+ return re.sub(f"\\\\([{chars}])", "\\1", text)
+
+ if not header:
+ return None, {}
+
+ disptype, *parts = header.split(";")
+ if not is_token(disptype):
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ params: Dict[str, str] = {}
+ while parts:
+ item = parts.pop(0)
+
+ if not item: # To handle trailing semicolons
+ warnings.warn(BadContentDispositionHeader(header))
+ continue
+
+ if "=" not in item:
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ key, value = item.split("=", 1)
+ key = key.lower().strip()
+ value = value.lstrip()
+
+ if key in params:
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ if not is_token(key):
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ elif is_continuous_param(key):
+ if is_quoted(value):
+ value = unescape(value[1:-1])
+ elif not is_token(value):
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ elif is_extended_param(key):
+ if is_rfc5987(value):
+ encoding, _, value = value.split("'", 2)
+ encoding = encoding or "utf-8"
+ else:
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ try:
+ value = unquote(value, encoding, "strict")
+ except UnicodeDecodeError: # pragma: nocover
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ else:
+ failed = True
+ if is_quoted(value):
+ failed = False
+ value = unescape(value[1:-1].lstrip("\\/"))
+ elif is_token(value):
+ failed = False
+ elif parts:
+ # maybe just ; in filename, in any case this is just
+ # one case fix, for proper fix we need to redesign parser
+ _value = f"{value};{parts[0]}"
+ if is_quoted(_value):
+ parts.pop(0)
+ value = unescape(_value[1:-1].lstrip("\\/"))
+ failed = False
+
+ if failed:
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ params[key] = value
+
+ return disptype.lower(), params
+
+
+def content_disposition_filename(
+ params: Mapping[str, str], name: str = "filename"
+) -> Optional[str]:
+ name_suf = "%s*" % name
+ if not params:
+ return None
+ elif name_suf in params:
+ return params[name_suf]
+ elif name in params:
+ return params[name]
+ else:
+ parts = []
+ fnparams = sorted(
+ (key, value) for key, value in params.items() if key.startswith(name_suf)
+ )
+ for num, (key, value) in enumerate(fnparams):
+ _, tail = key.split("*", 1)
+ if tail.endswith("*"):
+ tail = tail[:-1]
+ if tail == str(num):
+ parts.append(value)
+ else:
+ break
+ if not parts:
+ return None
+ value = "".join(parts)
+ if "'" in value:
+ encoding, _, value = value.split("'", 2)
+ encoding = encoding or "utf-8"
+ return unquote(value, encoding, "strict")
+ return value
+
+
+class MultipartResponseWrapper:
+ """Wrapper around the MultipartReader.
+
+ It takes care about
+ underlying connection and close it when it needs in.
+ """
+
+ def __init__(
+ self,
+ resp: "ClientResponse",
+ stream: "MultipartReader",
+ ) -> None:
+ self.resp = resp
+ self.stream = stream
+
+ def __aiter__(self) -> "MultipartResponseWrapper":
+ return self
+
+ async def __anext__(
+ self,
+ ) -> Union["MultipartReader", "BodyPartReader"]:
+ part = await self.next()
+ if part is None:
+ raise StopAsyncIteration
+ return part
+
+ def at_eof(self) -> bool:
+ """Returns True when all response data had been read."""
+ return self.resp.content.at_eof()
+
+ async def next(
+ self,
+ ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
+ """Emits next multipart reader object."""
+ item = await self.stream.next()
+ if self.stream.at_eof():
+ await self.release()
+ return item
+
+ async def release(self) -> None:
+ """Release the connection gracefully.
+
+ All remaining content is read to the void.
+ """
+ await self.resp.release()
+
+
+class BodyPartReader:
+ """Multipart reader for single body part."""
+
+ chunk_size = 8192
+
+ def __init__(
+ self,
+ boundary: bytes,
+ headers: "CIMultiDictProxy[str]",
+ content: StreamReader,
+ *,
+ subtype: str = "mixed",
+ default_charset: Optional[str] = None,
+ max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE,
+ ) -> None:
+ self.headers = headers
+ self._boundary = boundary
+ self._boundary_len = len(boundary) + 2 # Boundary + \r\n
+ self._content = content
+ self._default_charset = default_charset
+ self._at_eof = False
+ self._is_form_data = subtype == "form-data"
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
+ length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None)
+ self._length = int(length) if length is not None else None
+ self._read_bytes = 0
+ self._unread: Deque[bytes] = deque()
+ self._prev_chunk: Optional[bytes] = None
+ self._content_eof = 0
+ self._cache: Dict[str, Any] = {}
+ self._max_decompress_size = max_decompress_size
+
+ def __aiter__(self: Self) -> Self:
+ return self
+
+ async def __anext__(self) -> bytes:
+ part = await self.next()
+ if part is None:
+ raise StopAsyncIteration
+ return part
+
+ async def next(self) -> Optional[bytes]:
+ item = await self.read()
+ if not item:
+ return None
+ return item
+
+ async def read(self, *, decode: bool = False) -> bytes:
+ """Reads body part data.
+
+ decode: Decodes data following by encoding
+ method from Content-Encoding header. If it missed
+ data remains untouched
+ """
+ if self._at_eof:
+ return b""
+ data = bytearray()
+ while not self._at_eof:
+ data.extend(await self.read_chunk(self.chunk_size))
+ if decode:
+ return await self.decode(data)
+ return data
+
+ async def read_chunk(self, size: int = chunk_size) -> bytes:
+ """Reads body part content chunk of the specified size.
+
+ size: chunk size
+ """
+ if self._at_eof:
+ return b""
+ if self._length:
+ chunk = await self._read_chunk_from_length(size)
+ else:
+ chunk = await self._read_chunk_from_stream(size)
+
+ # For the case of base64 data, we must read a fragment of size with a
+ # remainder of 0 by dividing by 4 for string without symbols \n or \r
+ encoding = self.headers.get(CONTENT_TRANSFER_ENCODING)
+ if encoding and encoding.lower() == "base64":
+ stripped_chunk = b"".join(chunk.split())
+ remainder = len(stripped_chunk) % 4
+
+ while remainder != 0 and not self.at_eof():
+ over_chunk_size = 4 - remainder
+ over_chunk = b""
+
+ if self._prev_chunk:
+ over_chunk = self._prev_chunk[:over_chunk_size]
+ self._prev_chunk = self._prev_chunk[len(over_chunk) :]
+
+ if len(over_chunk) != over_chunk_size:
+ over_chunk += await self._content.read(4 - len(over_chunk))
+
+ if not over_chunk:
+ self._at_eof = True
+
+ stripped_chunk += b"".join(over_chunk.split())
+ chunk += over_chunk
+ remainder = len(stripped_chunk) % 4
+
+ self._read_bytes += len(chunk)
+ if self._read_bytes == self._length:
+ self._at_eof = True
+ if self._at_eof and await self._content.readline() != b"\r\n":
+ raise ValueError("Reader did not read all the data or it is malformed")
+ return chunk
+
+ async def _read_chunk_from_length(self, size: int) -> bytes:
+ # Reads body part content chunk of the specified size.
+ # The body part must has Content-Length header with proper value.
+ assert self._length is not None, "Content-Length required for chunked read"
+ chunk_size = min(size, self._length - self._read_bytes)
+ chunk = await self._content.read(chunk_size)
+ if self._content.at_eof():
+ self._at_eof = True
+ return chunk
+
+ async def _read_chunk_from_stream(self, size: int) -> bytes:
+ # Reads content chunk of body part with unknown length.
+ # The Content-Length header for body part is not necessary.
+ assert (
+ size >= self._boundary_len
+ ), "Chunk size must be greater or equal than boundary length + 2"
+ first_chunk = self._prev_chunk is None
+ if first_chunk:
+ # We need to re-add the CRLF that got removed from headers parsing.
+ self._prev_chunk = b"\r\n" + await self._content.read(size)
+
+ chunk = b""
+ # content.read() may return less than size, so we need to loop to ensure
+ # we have enough data to detect the boundary.
+ while len(chunk) < self._boundary_len:
+ chunk += await self._content.read(size)
+ self._content_eof += int(self._content.at_eof())
+ if self._content_eof > 2:
+ raise ValueError("Reading after EOF")
+ if self._content_eof:
+ break
+ if len(chunk) > size:
+ self._content.unread_data(chunk[size:])
+ chunk = chunk[:size]
+
+ assert self._prev_chunk is not None
+ window = self._prev_chunk + chunk
+ sub = b"\r\n" + self._boundary
+ if first_chunk:
+ idx = window.find(sub)
+ else:
+ idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
+ if idx >= 0:
+ # pushing boundary back to content
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ self._content.unread_data(window[idx:])
+ self._prev_chunk = self._prev_chunk[:idx]
+ chunk = window[len(self._prev_chunk) : idx]
+ if not chunk:
+ self._at_eof = True
+ result = self._prev_chunk[2 if first_chunk else 0 :] # Strip initial CRLF
+ self._prev_chunk = chunk
+ return result
+
+ async def readline(self) -> bytes:
+ """Reads body part by line by line."""
+ if self._at_eof:
+ return b""
+
+ if self._unread:
+ line = self._unread.popleft()
+ else:
+ line = await self._content.readline()
+
+ if line.startswith(self._boundary):
+ # the very last boundary may not come with \r\n,
+ # so set single rules for everyone
+ sline = line.rstrip(b"\r\n")
+ boundary = self._boundary
+ last_boundary = self._boundary + b"--"
+ # ensure that we read exactly the boundary, not something alike
+ if sline == boundary or sline == last_boundary:
+ self._at_eof = True
+ self._unread.append(line)
+ return b""
+ else:
+ next_line = await self._content.readline()
+ if next_line.startswith(self._boundary):
+ line = line[:-2] # strip CRLF but only once
+ self._unread.append(next_line)
+
+ return line
+
+ async def release(self) -> None:
+ """Like read(), but reads all the data to the void."""
+ if self._at_eof:
+ return
+ while not self._at_eof:
+ await self.read_chunk(self.chunk_size)
+
+ async def text(self, *, encoding: Optional[str] = None) -> str:
+ """Like read(), but assumes that body part contains text data."""
+ data = await self.read(decode=True)
+ # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm
+ # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send
+ encoding = encoding or self.get_charset(default="utf-8")
+ return data.decode(encoding)
+
+ async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
+ """Like read(), but assumes that body parts contains JSON data."""
+ data = await self.read(decode=True)
+ if not data:
+ return None
+ encoding = encoding or self.get_charset(default="utf-8")
+ return cast(Dict[str, Any], json.loads(data.decode(encoding)))
+
+ async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
+ """Like read(), but assumes that body parts contain form urlencoded data."""
+ data = await self.read(decode=True)
+ if not data:
+ return []
+ if encoding is not None:
+ real_encoding = encoding
+ else:
+ real_encoding = self.get_charset(default="utf-8")
+ try:
+ decoded_data = data.rstrip().decode(real_encoding)
+ except UnicodeDecodeError:
+ raise ValueError("data cannot be decoded with %s encoding" % real_encoding)
+
+ return parse_qsl(
+ decoded_data,
+ keep_blank_values=True,
+ encoding=real_encoding,
+ )
+
+ def at_eof(self) -> bool:
+ """Returns True if the boundary was reached or False otherwise."""
+ return self._at_eof
+
+ async def decode(self, data: bytes) -> bytes:
+ """Decodes data.
+
+ Decoding is done according the specified Content-Encoding
+ or Content-Transfer-Encoding headers value.
+ """
+ if CONTENT_TRANSFER_ENCODING in self.headers:
+ data = self._decode_content_transfer(data)
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
+ if not self._is_form_data and CONTENT_ENCODING in self.headers:
+ return await self._decode_content(data)
+ return data
+
+ async def _decode_content(self, data: bytes) -> bytes:
+ encoding = self.headers.get(CONTENT_ENCODING, "").lower()
+ if encoding == "identity":
+ return data
+ if encoding in {"deflate", "gzip"}:
+ return await ZLibDecompressor(
+ encoding=encoding,
+ suppress_deflate_header=True,
+ ).decompress(data, max_length=self._max_decompress_size)
+
+ raise RuntimeError(f"unknown content encoding: {encoding}")
+
+ def _decode_content_transfer(self, data: bytes) -> bytes:
+ encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
+
+ if encoding == "base64":
+ return base64.b64decode(data)
+ elif encoding == "quoted-printable":
+ return binascii.a2b_qp(data)
+ elif encoding in ("binary", "8bit", "7bit"):
+ return data
+ else:
+ raise RuntimeError(f"unknown content transfer encoding: {encoding}")
+
+ def get_charset(self, default: str) -> str:
+ """Returns charset parameter from Content-Type header or default."""
+ ctype = self.headers.get(CONTENT_TYPE, "")
+ mimetype = parse_mimetype(ctype)
+ return mimetype.parameters.get("charset", self._default_charset or default)
+
+ @reify
+ def name(self) -> Optional[str]:
+ """Returns name specified in Content-Disposition header.
+
+ If the header is missing or malformed, returns None.
+ """
+ _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
+ return content_disposition_filename(params, "name")
+
+ @reify
+ def filename(self) -> Optional[str]:
+ """Returns filename specified in Content-Disposition header.
+
+ Returns None if the header is missing or malformed.
+ """
+ _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
+ return content_disposition_filename(params, "filename")
+
+
+@payload_type(BodyPartReader, order=Order.try_first)
+class BodyPartReaderPayload(Payload):
+ _value: BodyPartReader
+ # _autoclose = False (inherited) - Streaming reader that may have resources
+
+ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value, *args, **kwargs)
+
+ params: Dict[str, str] = {}
+ if value.name is not None:
+ params["name"] = value.name
+ if value.filename is not None:
+ params["filename"] = value.filename
+
+ if params:
+ self.set_content_disposition("attachment", True, **params)
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ raise TypeError("Unable to decode.")
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """Raises TypeError as body parts should be consumed via write().
+
+ This is intentional: BodyPartReader payloads are designed for streaming
+ large data (potentially gigabytes) and must be consumed only once via
+ the write() method to avoid memory exhaustion. They cannot be buffered
+ in memory for reuse.
+ """
+ raise TypeError("Unable to read body part as bytes. Use write() to consume.")
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ field = self._value
+ chunk = await field.read_chunk(size=2**16)
+ while chunk:
+ await writer.write(await field.decode(chunk))
+ chunk = await field.read_chunk(size=2**16)
+
+
+class MultipartReader:
+ """Multipart body reader."""
+
+ #: Response wrapper, used when multipart readers constructs from response.
+ response_wrapper_cls = MultipartResponseWrapper
+ #: Multipart reader class, used to handle multipart/* body parts.
+ #: None points to type(self)
+ multipart_reader_cls: Optional[Type["MultipartReader"]] = None
+ #: Body part reader class for non multipart/* content types.
+ part_reader_cls = BodyPartReader
+
+ def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
+ self._mimetype = parse_mimetype(headers[CONTENT_TYPE])
+ assert self._mimetype.type == "multipart", "multipart/* content type expected"
+ if "boundary" not in self._mimetype.parameters:
+ raise ValueError(
+ "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE]
+ )
+
+ self.headers = headers
+ self._boundary = ("--" + self._get_boundary()).encode()
+ self._content = content
+ self._default_charset: Optional[str] = None
+ self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
+ self._at_eof = False
+ self._at_bof = True
+ self._unread: List[bytes] = []
+
+ def __aiter__(self: Self) -> Self:
+ return self
+
+ async def __anext__(
+ self,
+ ) -> Optional[Union["MultipartReader", BodyPartReader]]:
+ part = await self.next()
+ if part is None:
+ raise StopAsyncIteration
+ return part
+
+ @classmethod
+ def from_response(
+ cls,
+ response: "ClientResponse",
+ ) -> MultipartResponseWrapper:
+ """Constructs reader instance from HTTP response.
+
+ :param response: :class:`~aiohttp.client.ClientResponse` instance
+ """
+ obj = cls.response_wrapper_cls(
+ response, cls(response.headers, response.content)
+ )
+ return obj
+
+ def at_eof(self) -> bool:
+ """Returns True if the final boundary was reached, false otherwise."""
+ return self._at_eof
+
+ async def next(
+ self,
+ ) -> Optional[Union["MultipartReader", BodyPartReader]]:
+ """Emits the next multipart body part."""
+ # So, if we're at BOF, we need to skip till the boundary.
+ if self._at_eof:
+ return None
+ await self._maybe_release_last_part()
+ if self._at_bof:
+ await self._read_until_first_boundary()
+ self._at_bof = False
+ else:
+ await self._read_boundary()
+ if self._at_eof: # we just read the last boundary, nothing to do there
+ return None
+
+ part = await self.fetch_next_part()
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6
+ if (
+ self._last_part is None
+ and self._mimetype.subtype == "form-data"
+ and isinstance(part, BodyPartReader)
+ ):
+ _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION))
+ if params.get("name") == "_charset_":
+ # Longest encoding in https://encoding.spec.whatwg.org/encodings.json
+ # is 19 characters, so 32 should be more than enough for any valid encoding.
+ charset = await part.read_chunk(32)
+ if len(charset) > 31:
+ raise RuntimeError("Invalid default charset")
+ self._default_charset = charset.strip().decode()
+ part = await self.fetch_next_part()
+ self._last_part = part
+ return self._last_part
+
+ async def release(self) -> None:
+ """Reads all the body parts to the void till the final boundary."""
+ while not self._at_eof:
+ item = await self.next()
+ if item is None:
+ break
+ await item.release()
+
+ async def fetch_next_part(
+ self,
+ ) -> Union["MultipartReader", BodyPartReader]:
+ """Returns the next body part reader."""
+ headers = await self._read_headers()
+ return self._get_part_reader(headers)
+
+ def _get_part_reader(
+ self,
+ headers: "CIMultiDictProxy[str]",
+ ) -> Union["MultipartReader", BodyPartReader]:
+ """Dispatches the response by the `Content-Type` header.
+
+ Returns a suitable reader instance.
+
+ :param dict headers: Response headers
+ """
+ ctype = headers.get(CONTENT_TYPE, "")
+ mimetype = parse_mimetype(ctype)
+
+ if mimetype.type == "multipart":
+ if self.multipart_reader_cls is None:
+ return type(self)(headers, self._content)
+ return self.multipart_reader_cls(headers, self._content)
+ else:
+ return self.part_reader_cls(
+ self._boundary,
+ headers,
+ self._content,
+ subtype=self._mimetype.subtype,
+ default_charset=self._default_charset,
+ )
+
+ def _get_boundary(self) -> str:
+ boundary = self._mimetype.parameters["boundary"]
+ if len(boundary) > 70:
+ raise ValueError("boundary %r is too long (70 chars max)" % boundary)
+
+ return boundary
+
+ async def _readline(self) -> bytes:
+ if self._unread:
+ return self._unread.pop()
+ return await self._content.readline()
+
    async def _read_until_first_boundary(self) -> None:
        """Skip any preamble until the opening (or closing) boundary line."""
        while True:
            chunk = await self._readline()
            if chunk == b"":
                # EOF before any boundary line: the body is malformed.
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                # Closing boundary right away: the multipart body is empty.
                self._at_eof = True
                return
+
    async def _read_boundary(self) -> None:
        """Consume the boundary line between parts (or the final boundary)."""
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
+
+ async def _read_headers(self) -> "CIMultiDictProxy[str]":
+ lines = []
+ while True:
+ chunk = await self._content.readline()
+ chunk = chunk.rstrip(b"\r\n")
+ lines.append(chunk)
+ if not chunk:
+ break
+ parser = HeadersParser()
+ headers, raw_headers = parser.parse_headers(lines)
+ return headers
+
+ async def _maybe_release_last_part(self) -> None:
+ """Ensures that the last read body part is read completely."""
+ if self._last_part is not None:
+ if not self._last_part.at_eof():
+ await self._last_part.release()
+ self._unread.extend(self._last_part._unread)
+ self._last_part = None
+
+
+_Part = Tuple[Payload, str, str]
+
+
class MultipartWriter(Payload):
    """Multipart body writer.

    Collects ``(payload, content-encoding, transfer-encoding)`` triples
    and serializes them separated by ``--boundary`` delimiter lines.
    """

    _value: None
    # _consumed = False (inherited) - Can be encoded multiple times
    _autoclose = True  # No file handles, just collects parts in memory

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only.
        # In both situations.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        self._parts: List[_Part] = []
        self._is_form_data = subtype == "form-data"

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Nothing to release on exit; the context-manager form exists for
        # API symmetry only.
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        # Always truthy, even with zero parts appended.
        return True

    # Boundary validation patterns (RFC 7230 token / qdtext), compiled once.
    _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter = token "=" ( token / quoted-string )
        # token = 1*tchar
        # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text = %x80-FF
        # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        # / DIGIT / ALPHA
        # ; any VCHAR, except delimiters
        # VCHAR = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        """The boundary delimiter as a str."""
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload:
        """Append *obj* as a new part, adapting it to a Payload if needed."""
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to multipart writer."""
        encoding: Optional[str] = None
        te_encoding: Optional[str] = None
        if self._is_form_data:
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
            # form-data parts must not carry per-part encoding/length headers.
            assert (
                not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING}
                & payload.headers.keys()
            )
            # Set default Content-Disposition in case user doesn't create one
            if CONTENT_DISPOSITION not in payload.headers:
                name = f"section-{len(self._parts)}"
                payload.set_content_disposition("form-data", name=name)
        else:
            # compression
            encoding = payload.headers.get(CONTENT_ENCODING, "").lower()
            if encoding and encoding not in ("deflate", "gzip", "identity"):
                raise RuntimeError(f"unknown content encoding: {encoding}")
            if encoding == "identity":
                # "identity" means no transformation; normalize to None.
                encoding = None

            # te encoding
            te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
            if te_encoding not in ("", "base64", "quoted-printable", "binary"):
                raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
            if te_encoding == "binary":
                te_encoding = None

            # size
            size = payload.size
            if size is not None and not (encoding or te_encoding):
                payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[Mapping[str, str]] = None
    ) -> Payload:
        """Helper to append JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[Mapping[str, str]] = None,
    ) -> Payload:
        """Helper to append form urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )

    @property
    def size(self) -> Optional[int]:
        """Size of the payload.

        Returns None when any part is compressed/encoded or of unknown
        size, since the serialized length cannot then be precomputed.
        """
        total = 0
        for part, encoding, te_encoding in self._parts:
            part_size = part.size
            if encoding or te_encoding or part_size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part_size  # b'--'+self._boundary+b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
        return total

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Return string representation of the multipart data.

        WARNING: This method may do blocking I/O if parts contain file payloads.
        It should not be called in the event loop. Use as_bytes().decode() instead.
        """
        return "".join(
            "--"
            + self.boundary
            + "\r\n"
            + part._binary_headers.decode(encoding, errors)
            + part.decode()
            for part, _e, _te in self._parts
        )

    async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
        """Return bytes representation of the multipart data.

        This method is async-safe and calls as_bytes on underlying payloads.
        """
        parts: List[bytes] = []

        # Process each part
        for part, _e, _te in self._parts:
            # Add boundary
            parts.append(b"--" + self._boundary + b"\r\n")

            # Add headers
            parts.append(part._binary_headers)

            # Add payload content using as_bytes for async safety
            part_bytes = await part.as_bytes(encoding, errors)
            parts.append(part_bytes)

            # Add trailing CRLF
            parts.append(b"\r\n")

        # Add closing boundary
        parts.append(b"--" + self._boundary + b"--\r\n")

        return b"".join(parts)

    async def write(
        self, writer: AbstractStreamWriter, close_boundary: bool = True
    ) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            if self._is_form_data:
                # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
                assert CONTENT_DISPOSITION in part.headers
                assert "name=" in part.headers[CONTENT_DISPOSITION]

            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                # Route the part through an encoding adapter that applies
                # compression and/or transfer encoding on the fly.
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")

    async def close(self) -> None:
        """
        Close all part payloads that need explicit closing.

        IMPORTANT: This method must not await anything that might not finish
        immediately, as it may be called during cleanup/cancellation. Schedule
        any long-running operations without awaiting them.
        """
        if self._consumed:
            return
        self._consumed = True

        # Close all parts that need explicit closing
        # We catch and log exceptions to ensure all parts get a chance to close
        # we do not use asyncio.gather() here because we are not allowed
        # to suspend given we may be called during cleanup
        for idx, (part, _, _) in enumerate(self._parts):
            if not part.autoclose and not part.consumed:
                try:
                    await part.close()
                except Exception as exc:
                    internal_logger.error(
                        "Failed to close multipart part %d: %s", idx, exc, exc_info=True
                    )
+
+
class MultipartPayloadWriter:
    """Stream-writer adapter applying per-part encodings.

    Wraps an ``AbstractStreamWriter`` and, when enabled, compresses
    (via ``ZLibCompressor``) and/or transfer-encodes (base64 or
    quoted-printable) every chunk before forwarding it downstream.
    """

    def __init__(self, writer: AbstractStreamWriter) -> None:
        self._writer = writer
        self._encoding: Optional[str] = None
        self._compress: Optional[ZLibCompressor] = None
        # Carries base64 leftovers between writes: input must be encoded
        # in whole 3-byte groups to produce valid output.
        self._encoding_buffer: Optional[bytearray] = None

    def enable_encoding(self, encoding: str) -> None:
        # Only base64 needs the carry-over buffer; quoted-printable can be
        # encoded chunk by chunk. Other values are silently ignored.
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: Optional[int] = None
    ) -> None:
        self._compress = ZLibCompressor(
            encoding=encoding,
            suppress_deflate_header=True,
            strategy=strategy,
        )

    async def write_eof(self) -> None:
        """Flush the compressor and any buffered base64 remainder."""
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                # Drop the compressor first so write() forwards the flushed
                # bytes without trying to compress them again.
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            if self._encoding_buffer:
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        """Encode *chunk* as configured and forward it downstream."""
        if self._compress is not None:
            if chunk:
                chunk = await self._compress.compress(chunk)
                if not chunk:
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                # Encode only whole 3-byte groups; the remainder stays in
                # the buffer until the next write() or write_eof().
                div, mod = divmod(len(buf), 3)
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/payload.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/payload.py"
new file mode 100644
index 0000000..5b88fa0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/payload.py"
@@ -0,0 +1,1120 @@
+import asyncio
+import enum
+import io
+import json
+import mimetypes
+import os
+import sys
+import warnings
+from abc import ABC, abstractmethod
+from collections.abc import Iterable
+from itertools import chain
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Final,
+ List,
+ Optional,
+ Set,
+ TextIO,
+ Tuple,
+ Type,
+ Union,
+)
+
+from multidict import CIMultiDict
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import (
+ _SENTINEL,
+ content_disposition_header,
+ guess_filename,
+ parse_mimetype,
+ sentinel,
+)
+from .streams import StreamReader
+from .typedefs import JSONEncoder, _CIMultiDict
+
+__all__ = (
+ "PAYLOAD_REGISTRY",
+ "get_payload",
+ "payload_type",
+ "Payload",
+ "BytesPayload",
+ "StringPayload",
+ "IOBasePayload",
+ "BytesIOPayload",
+ "BufferedReaderPayload",
+ "TextIOPayload",
+ "StringIOPayload",
+ "JsonPayload",
+ "AsyncIterablePayload",
+)
+
+TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
+READ_SIZE: Final[int] = 2**16 # 64 KB
+_CLOSE_FUTURES: Set[asyncio.Future[None]] = set()
+
+
class LookupError(Exception):
    """Raised when no payload factory is found for the given data type.

    Note: within this module the name shadows the builtin ``LookupError``.
    """
+
+
class Order(str, enum.Enum):
    """Registration priority for payload factories.

    ``try_first`` factories are consulted before ``normal`` ones, which
    are consulted before ``try_last`` (see ``PayloadRegistry``).
    """

    normal = "normal"
    try_first = "try_first"
    try_last = "try_last"
+
+
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Build a Payload for *data* using the module-level registry."""
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
+
+
def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    """Register *factory* for *type* in the module-level registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
+
+
class payload_type:
    """Class decorator registering a Payload subclass for a data type.

    Usage::

        @payload_type(bytes)
        class MyPayload(Payload): ...
    """

    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        # Register, then hand the class back unchanged so decoration is
        # transparent to the caller.
        register_payload(factory, self.type, order=self.order)
        return factory
+
+
+PayloadType = Type["Payload"]
+_PayloadRegistryItem = Tuple[PayloadType, Any]
+
+
class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    __slots__ = ("_first", "_normal", "_last", "_normal_lookup")

    def __init__(self) -> None:
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []
        self._normal_lookup: Dict[Any, PayloadType] = {}

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        """Find a factory accepting *data* and build a payload with it."""
        # "try_first" factories always win, in registration order.
        for factory, accepted in self._first:
            if isinstance(data, accepted):
                return factory(data, *args, **kwargs)
        # O(1) exact-type hit in the fast lookup table.
        if exact_factory := self._normal_lookup.get(type(data)):
            return exact_factory(data, *args, **kwargs)
        # Already a payload: pass it through untouched.
        if isinstance(data, Payload):
            return data
        # Slow path: linear isinstance() scan over "normal" then "try_last".
        for factory, accepted in _CHAIN(self._normal, self._last):
            if isinstance(data, accepted):
                return factory(data, *args, **kwargs)
        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        """File *factory* under *type* in the bucket selected by *order*."""
        if order is Order.try_first:
            self._first.append((factory, type))
            return
        if order is Order.try_last:
            self._last.append((factory, type))
            return
        if order is not Order.normal:
            raise ValueError(f"Unsupported order {order!r}")
        self._normal.append((factory, type))
        # Single types (or each member of a tuple of types) also feed the
        # exact-type fast path used by get().
        if isinstance(type, Iterable):
            for member in type:
                self._normal_lookup[member] = factory
        else:
            self._normal_lookup[type] = factory
+
+
class Payload(ABC):
    """Abstract base class for HTTP body payloads.

    Subclasses wrap a concrete value (bytes, strings, file objects, ...)
    and know how to report its size, describe it via headers, and write
    it to an ``AbstractStreamWriter``.
    """

    _default_content_type: str = "application/octet-stream"
    _size: Optional[int] = None
    _consumed: bool = False  # Default: payload has not been consumed yet
    _autoclose: bool = False  # Default: assume resource needs explicit closing

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Union[str, None, _SENTINEL] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        # Content-Type resolution order: explicit argument, then a guess
        # from the filename, then the class default.
        if content_type is not sentinel and content_type is not None:
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            if sys.version_info >= (3, 13):
                guesser = mimetypes.guess_file_type
            else:
                guesser = mimetypes.guess_type
            content_type = guesser(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        if headers:
            self._headers.update(headers)

    @property
    def size(self) -> Optional[int]:
        """Size of the payload in bytes.

        Returns the number of bytes that will be transmitted when the payload
        is written. For string payloads, this is the size after encoding to bytes,
        not the length of the string.
        """
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # Serialized header block plus the blank line that terminates it.
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    @property
    def consumed(self) -> bool:
        """Whether the payload has been consumed and cannot be reused."""
        return self._consumed

    @property
    def autoclose(self) -> bool:
        """
        Whether the payload can close itself automatically.

        Returns True if the payload has no file handles or resources that need
        explicit closing. If False, callers must await close() to release resources.
        """
        return self._autoclose

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """
        Return string representation of the value.

        This is named decode() to allow compatibility with bytes objects.
        """

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """
        Write payload to the writer stream.

        Args:
            writer: An AbstractStreamWriter instance that handles the actual writing

        This is a legacy method that writes the entire payload without length constraints.

        Important:
            For new implementations, use write_with_length() instead of this method.
            This method is maintained for backwards compatibility and will eventually
            delegate to write_with_length(writer, None) in all implementations.

        All payload subclasses must override this method for backwards compatibility,
        but new code should use write_with_length for more flexibility and control.

        """

    # write_with_length is new in aiohttp 3.12
    # it should be overridden by subclasses
    async def write_with_length(
        self, writer: AbstractStreamWriter, content_length: Optional[int]
    ) -> None:
        """
        Write payload with a specific content length constraint.

        Args:
            writer: An AbstractStreamWriter instance that handles the actual writing
            content_length: Maximum number of bytes to write (None for unlimited)

        This method allows writing payload content with a specific length constraint,
        which is particularly useful for HTTP responses with Content-Length header.

        Note:
            This is the base implementation that provides backwards compatibility
            for subclasses that don't override this method. Specific payload types
            should override this method to implement proper length-constrained writing.

        """
        # Backwards compatibility for subclasses that don't override this method
        # and for the default implementation
        await self.write(writer)

    async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
        """
        Return bytes representation of the value.

        This is a convenience method that calls decode() and encodes the result
        to bytes using the specified encoding.
        """
        # Use instance encoding if available, otherwise use parameter
        actual_encoding = self._encoding or encoding
        return self.decode(actual_encoding, errors).encode(actual_encoding)

    def _close(self) -> None:
        """
        Async safe synchronous close operations for backwards compatibility.

        This method exists only for backwards compatibility with code that
        needs to clean up payloads synchronously. In the future, we will
        drop this method and only support the async close() method.

        WARNING: This method must be safe to call from within the event loop
        without blocking. Subclasses should not perform any blocking I/O here.

        WARNING: This method must be called from within an event loop for
        certain payload types (e.g., IOBasePayload). Calling it outside an
        event loop may raise RuntimeError.
        """
        # This is a no-op by default, but subclasses can override it
        # for non-blocking cleanup operations.

    async def close(self) -> None:
        """
        Close the payload if it holds any resources.

        IMPORTANT: This method must not await anything that might not finish
        immediately, as it may be called during cleanup/cancellation. Schedule
        any long-running operations without awaiting them.

        In the future, this will be the only close method supported.
        """
        self._close()
+
+
class BytesPayload(Payload):
    """Payload wrapping in-memory ``bytes``/``bytearray``/``memoryview``."""

    _value: bytes
    # _consumed = False (inherited) - Bytes are immutable and can be reused
    _autoclose = True  # No file handle, just bytes in memory

    def __init__(
        self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any
    ) -> None:
        """Wrap *value*, defaulting the content type to octet-stream.

        Raises TypeError if *value* is not bytes-like, and emits a
        ResourceWarning for bodies larger than TOO_LARGE_BYTES_BODY.
        """
        if "content_type" not in kwargs:
            kwargs["content_type"] = "application/octet-stream"

        super().__init__(value, *args, **kwargs)

        if isinstance(value, memoryview):
            self._size = value.nbytes
        elif isinstance(value, (bytes, bytearray)):
            self._size = len(value)
        else:
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        if self._size > TOO_LARGE_BYTES_BODY:
            # Pass ``source`` directly instead of rebinding the name
            # ``kwargs`` to an unrelated dict (the old form shadowed the
            # constructor's own **kwargs, which was confusing to readers).
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                source=self,
            )

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Decode the wrapped bytes to str."""
        return self._value.decode(encoding, errors)

    async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
        """
        Return bytes representation of the value.

        This method returns the raw bytes content of the payload.
        It is equivalent to accessing the _value attribute directly.
        """
        return self._value

    async def write(self, writer: AbstractStreamWriter) -> None:
        """
        Write the entire bytes payload to the writer stream.

        Args:
            writer: An AbstractStreamWriter instance that handles the actual writing

        This method writes the entire bytes content without any length constraint.

        Note:
            For new implementations that need length control, use write_with_length().
            This method is maintained for backwards compatibility and is equivalent
            to write_with_length(writer, None).

        """
        await writer.write(self._value)

    async def write_with_length(
        self, writer: AbstractStreamWriter, content_length: Optional[int]
    ) -> None:
        """
        Write bytes payload with a specific content length constraint.

        Args:
            writer: An AbstractStreamWriter instance that handles the actual writing
            content_length: Maximum number of bytes to write (None for unlimited)

        This method writes either the entire byte sequence or a slice of it
        up to the specified content_length. For BytesPayload, this operation
        is performed efficiently using array slicing.

        """
        if content_length is not None:
            await writer.write(self._value[:content_length])
        else:
            await writer.write(self._value)
+
+
class StringPayload(BytesPayload):
    """Payload for ``str`` values, encoded to bytes on construction.

    The effective charset comes from the explicit *encoding* argument,
    else from the charset parameter of *content_type*, else UTF-8.
    """

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        if encoding is not None:
            # Explicit encoding wins; synthesize a matching content type
            # when the caller didn't supply one.
            real_encoding = encoding
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
        elif content_type is not None:
            # Derive the charset from the supplied content type.
            mimetype = parse_mimetype(content_type)
            real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            # Neither given: plain UTF-8 text.
            real_encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"

        super().__init__(
            value.encode(real_encoding),
            encoding=real_encoding,
            content_type=content_type,
            *args,
            **kwargs,
        )
+
+
class StringIOPayload(StringPayload):
    # Eagerly drains the text stream up front; the payload then behaves
    # exactly like a StringPayload over the full contents.
    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        super().__init__(value.read(), *args, **kwargs)
+
+
+class IOBasePayload(Payload):
+ _value: io.IOBase
+ # _consumed = False (inherited) - File can be re-read from the same position
+ _start_position: Optional[int] = None
+ # _autoclose = False (inherited) - Has file handle that needs explicit closing
+
    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        # Derive a filename from the file object unless the caller
        # supplied one explicitly.
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        # Default to an "attachment" Content-Disposition carrying the
        # filename, unless one is already present in the headers.
        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)
+
    def _set_or_restore_start_position(self) -> None:
        """Set or restore the start position of the file-like object.

        On first call, records the current position; on later calls,
        seeks back to it so the payload can be re-read. Objects that
        cannot tell/seek are marked as consumed instead.
        """
        if self._start_position is None:
            try:
                self._start_position = self._value.tell()
            except (OSError, AttributeError):
                self._consumed = True  # Cannot seek, mark as consumed
            return
        try:
            self._value.seek(self._start_position)
        except (OSError, AttributeError):
            # Failed to seek back - mark as consumed since we've already read
            self._consumed = True
+
    def _read_and_available_len(
        self, remaining_content_len: Optional[int]
    ) -> Tuple[Optional[int], bytes]:
        """
        Read the file-like object and return both its total size and the first chunk.

        Args:
            remaining_content_len: Optional limit on how many bytes to read in this operation.
                If None, READ_SIZE will be used as the default chunk size.

        Returns:
            A tuple containing:
            - The total size of the remaining unread content (None if size cannot be determined)
            - The first chunk of bytes read from the file object

        This method is optimized to perform both size calculation and initial read
        in a single operation, which is executed in a single executor job to minimize
        context switches and file operations when streaming content.

        """
        self._set_or_restore_start_position()
        size = self.size  # Call size only once since it does I/O
        # First chunk is capped by READ_SIZE, the known payload size (if
        # any) and the caller's remaining content length (if any).
        return size, self._value.read(
            min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE)
        )
+
    def _read(self, remaining_content_len: Optional[int]) -> bytes:
        """
        Read a chunk of data from the file-like object.

        Args:
            remaining_content_len: Optional maximum number of bytes to read.
                If None, READ_SIZE will be used as the default chunk size.

        Returns:
            A chunk of bytes read from the file object, respecting the
            remaining_content_len limit if specified.

        This method is used for subsequent reads during streaming after
        the initial _read_and_available_len call has been made.

        """
        # Fall back to READ_SIZE when the caller imposes no limit.
        return self._value.read(remaining_content_len or READ_SIZE)  # type: ignore[no-any-return]
+
    @property
    def size(self) -> Optional[int]:
        """
        Size of the payload in bytes.

        Returns the total size of the payload content from the initial position.
        This ensures consistent Content-Length for requests, including 307/308 redirects
        where the same payload instance is reused.

        Returns None if the size cannot be determined (e.g., for unseekable streams).
        """
        try:
            # Store the start position on first access.
            # This is critical when the same payload instance is reused (e.g., 307/308
            # redirects). Without storing the initial position, after the payload is
            # read once, the file position would be at EOF, which would cause the
            # size calculation to return 0 (file_size - EOF position).
            # By storing the start position, we ensure the size calculation always
            # returns the correct total size for any subsequent use.
            if self._start_position is None:
                self._start_position = self._value.tell()

            # Return the total size from the start position
            # This ensures Content-Length is correct even after reading.
            # fstat() needs a real OS file descriptor; in-memory streams
            # raise here and fall through to None.
            return os.fstat(self._value.fileno()).st_size - self._start_position
        except (AttributeError, OSError):
            return None
+
    async def write(self, writer: AbstractStreamWriter) -> None:
        """
        Write the entire file-like payload to the writer stream.

        Args:
            writer: An AbstractStreamWriter instance that handles the actual writing

        This method writes the entire file content without any length constraint.
        It delegates to write_with_length() with no length limit for implementation
        consistency.

        Note:
            For new implementations that need length control, use write_with_length() directly.
            This method is maintained for backwards compatibility with existing code.

        """
        # An unbounded write is just a length-constrained write with no limit.
        await self.write_with_length(writer, None)
+
    async def write_with_length(
        self, writer: AbstractStreamWriter, content_length: Optional[int]
    ) -> None:
        """
        Write file-like payload with a specific content length constraint.

        Args:
            writer: An AbstractStreamWriter instance that handles the actual writing
            content_length: Maximum number of bytes to write (None for unlimited)

        This method implements optimized streaming of file content with length constraints:

        1. File reading is performed in a thread pool to avoid blocking the event loop
        2. Content is read and written in chunks to maintain memory efficiency
        3. Writing stops when either:
           - All available file content has been written (when size is known)
           - The specified content_length has been reached
        4. File resources are properly closed even if the operation is cancelled

        The implementation carefully handles both known-size and unknown-size payloads,
        as well as constrained and unconstrained content lengths.

        """
        loop = asyncio.get_running_loop()
        total_written_len = 0
        remaining_content_len = content_length

        # Get initial data and available length
        available_len, chunk = await loop.run_in_executor(
            None, self._read_and_available_len, remaining_content_len
        )
        # Process data chunks until done
        while chunk:
            chunk_len = len(chunk)

            # Write data with or without length constraint
            if remaining_content_len is None:
                await writer.write(chunk)
            else:
                await writer.write(chunk[:remaining_content_len])
                # chunk_len counts bytes read, not bytes written, so this
                # can drop below zero on a final truncated chunk.
                remaining_content_len -= chunk_len

            total_written_len += chunk_len

            # Check if we're done writing
            if self._should_stop_writing(
                available_len, total_written_len, remaining_content_len
            ):
                return

            # Read next chunk (in the executor, to keep the loop responsive)
            chunk = await loop.run_in_executor(
                None,
                self._read,
                (
                    min(READ_SIZE, remaining_content_len)
                    if remaining_content_len is not None
                    else READ_SIZE
                ),
            )
+
+ def _should_stop_writing(
+ self,
+ available_len: Optional[int],
+ total_written_len: int,
+ remaining_content_len: Optional[int],
+ ) -> bool:
+ """
+ Determine if we should stop writing data.
+
+ Args:
+ available_len: Known size of the payload if available (None if unknown)
+ total_written_len: Number of bytes already written
+ remaining_content_len: Remaining bytes to be written for content-length limited responses
+
+ Returns:
+ True if we should stop writing data, based on either:
+ - Having written all available data (when size is known)
+ - Having written all requested content (when content-length is specified)
+
+ """
+ return (available_len is not None and total_written_len >= available_len) or (
+ remaining_content_len is not None and remaining_content_len <= 0
+ )
+
+ def _close(self) -> None:
+ """
+ Async safe synchronous close operations for backwards compatibility.
+
+ This method exists only for backwards
+ compatibility. Use the async close() method instead.
+
+ WARNING: This method MUST be called from within an event loop.
+ Calling it outside an event loop will raise RuntimeError.
+ """
+ # Skip if already consumed
+ if self._consumed:
+ return
+ self._consumed = True # Mark as consumed to prevent further writes
+ # Schedule file closing without awaiting to prevent cancellation issues
+ loop = asyncio.get_running_loop()
+ close_future = loop.run_in_executor(None, self._value.close)
+ # Hold a strong reference to the future to prevent it from being
+ # garbage collected before it completes.
+ _CLOSE_FUTURES.add(close_future)
+ close_future.add_done_callback(_CLOSE_FUTURES.remove)
+
+ async def close(self) -> None:
+ """
+ Close the payload if it holds any resources.
+
+ IMPORTANT: This method must not await anything that might not finish
+ immediately, as it may be called during cleanup/cancellation. Schedule
+ any long-running operations without awaiting them.
+ """
+ self._close()
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ """
+ Return string representation of the value.
+
+ WARNING: This method does blocking I/O and should not be called in the event loop.
+ """
+ return self._read_all().decode(encoding, errors)
+
+ def _read_all(self) -> bytes:
+ """Read the entire file-like object and return its content as bytes."""
+ self._set_or_restore_start_position()
+ # Use readlines() to ensure we get all content
+ return b"".join(self._value.readlines())
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire file content and returns it as bytes.
+ It is equivalent to reading the file-like object directly.
+ The file reading is performed in an executor to avoid blocking the event loop.
+ """
+ loop = asyncio.get_running_loop()
+ return await loop.run_in_executor(None, self._read_all)
+
+
+class TextIOPayload(IOBasePayload):
+ _value: io.TextIOBase
+ # _autoclose = False (inherited) - Has text file handle that needs explicit closing
+
+ def __init__(
+ self,
+ value: TextIO,
+ *args: Any,
+ encoding: Optional[str] = None,
+ content_type: Optional[str] = None,
+ **kwargs: Any,
+ ) -> None:
+
+ if encoding is None:
+ if content_type is None:
+ encoding = "utf-8"
+ content_type = "text/plain; charset=utf-8"
+ else:
+ mimetype = parse_mimetype(content_type)
+ encoding = mimetype.parameters.get("charset", "utf-8")
+ else:
+ if content_type is None:
+ content_type = "text/plain; charset=%s" % encoding
+
+ super().__init__(
+ value,
+ content_type=content_type,
+ encoding=encoding,
+ *args,
+ **kwargs,
+ )
+
+ def _read_and_available_len(
+ self, remaining_content_len: Optional[int]
+ ) -> Tuple[Optional[int], bytes]:
+ """
+ Read the text file-like object and return both its total size and the first chunk.
+
+ Args:
+ remaining_content_len: Optional limit on how many bytes to read in this operation.
+ If None, READ_SIZE will be used as the default chunk size.
+
+ Returns:
+ A tuple containing:
+ - The total size of the remaining unread content (None if size cannot be determined)
+ - The first chunk of bytes read from the file object, encoded using the payload's encoding
+
+ This method is optimized to perform both size calculation and initial read
+ in a single operation, which is executed in a single executor job to minimize
+ context switches and file operations when streaming content.
+
+ Note:
+ TextIOPayload handles encoding of the text content before writing it
+ to the stream. If no encoding is specified, UTF-8 is used as the default.
+
+ """
+ self._set_or_restore_start_position()
+ size = self.size
+ chunk = self._value.read(
+ min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE)
+ )
+ return size, chunk.encode(self._encoding) if self._encoding else chunk.encode()
+
+ def _read(self, remaining_content_len: Optional[int]) -> bytes:
+ """
+ Read a chunk of data from the text file-like object.
+
+ Args:
+ remaining_content_len: Optional maximum number of bytes to read.
+ If None, READ_SIZE will be used as the default chunk size.
+
+ Returns:
+ A chunk of bytes read from the file object and encoded using the payload's
+ encoding. The data is automatically converted from text to bytes.
+
+ This method is used for subsequent reads during streaming after
+ the initial _read_and_available_len call has been made. It properly
+ handles text encoding, converting the text content to bytes using
+ the specified encoding (or UTF-8 if none was provided).
+
+ """
+ chunk = self._value.read(remaining_content_len or READ_SIZE)
+ return chunk.encode(self._encoding) if self._encoding else chunk.encode()
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ """
+ Return string representation of the value.
+
+ WARNING: This method does blocking I/O and should not be called in the event loop.
+ """
+ self._set_or_restore_start_position()
+ return self._value.read()
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire text file content and returns it as bytes.
+ It encodes the text content using the specified encoding.
+ The file reading is performed in an executor to avoid blocking the event loop.
+ """
+ loop = asyncio.get_running_loop()
+
+ # Use instance encoding if available, otherwise use parameter
+ actual_encoding = self._encoding or encoding
+
+ def _read_and_encode() -> bytes:
+ self._set_or_restore_start_position()
+ # TextIO read() always returns the full content
+ return self._value.read().encode(actual_encoding, errors)
+
+ return await loop.run_in_executor(None, _read_and_encode)
+
+
+class BytesIOPayload(IOBasePayload):
+ _value: io.BytesIO
+ _size: int # Always initialized in __init__
+ _autoclose = True # BytesIO is in-memory, safe to auto-close
+
+ def __init__(self, value: io.BytesIO, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value, *args, **kwargs)
+ # Calculate size once during initialization
+ self._size = len(self._value.getbuffer()) - self._value.tell()
+
+ @property
+ def size(self) -> int:
+ """Size of the payload in bytes.
+
+ Returns the number of bytes in the BytesIO buffer that will be transmitted.
+ This is calculated once during initialization for efficiency.
+ """
+ return self._size
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ self._set_or_restore_start_position()
+ return self._value.read().decode(encoding, errors)
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ return await self.write_with_length(writer, None)
+
+ async def write_with_length(
+ self, writer: AbstractStreamWriter, content_length: Optional[int]
+ ) -> None:
+ """
+ Write BytesIO payload with a specific content length constraint.
+
+ Args:
+ writer: An AbstractStreamWriter instance that handles the actual writing
+ content_length: Maximum number of bytes to write (None for unlimited)
+
+ This implementation is specifically optimized for BytesIO objects:
+
+ 1. Reads content in chunks to maintain memory efficiency
+ 2. Yields control back to the event loop periodically to prevent blocking
+ when dealing with large BytesIO objects
+ 3. Respects content_length constraints when specified
+ 4. Properly cleans up by closing the BytesIO object when done or on error
+
+ The periodic yielding to the event loop is important for maintaining
+ responsiveness when processing large in-memory buffers.
+
+ """
+ self._set_or_restore_start_position()
+ loop_count = 0
+ remaining_bytes = content_length
+ while chunk := self._value.read(READ_SIZE):
+ if loop_count > 0:
+ # Avoid blocking the event loop
+ # if they pass a large BytesIO object
+ # and we are not in the first iteration
+ # of the loop
+ await asyncio.sleep(0)
+ if remaining_bytes is None:
+ await writer.write(chunk)
+ else:
+ await writer.write(chunk[:remaining_bytes])
+ remaining_bytes -= len(chunk)
+ if remaining_bytes <= 0:
+ return
+ loop_count += 1
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire BytesIO content and returns it as bytes.
+ It is equivalent to accessing the _value attribute directly.
+ """
+ self._set_or_restore_start_position()
+ return self._value.read()
+
+ async def close(self) -> None:
+ """
+ Close the BytesIO payload.
+
+ This does nothing since BytesIO is in-memory and does not require explicit closing.
+ """
+
+
+class BufferedReaderPayload(IOBasePayload):
+ _value: io.BufferedIOBase
+ # _autoclose = False (inherited) - Has buffered file handle that needs explicit closing
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ self._set_or_restore_start_position()
+ return self._value.read().decode(encoding, errors)
+
+
+class JsonPayload(BytesPayload):
+ def __init__(
+ self,
+ value: Any,
+ encoding: str = "utf-8",
+ content_type: str = "application/json",
+ dumps: JSONEncoder = json.dumps,
+ *args: Any,
+ **kwargs: Any,
+ ) -> None:
+
+ super().__init__(
+ dumps(value).encode(encoding),
+ content_type=content_type,
+ encoding=encoding,
+ *args,
+ **kwargs,
+ )
+
+
+if TYPE_CHECKING:
+ from typing import AsyncIterable, AsyncIterator
+
+ _AsyncIterator = AsyncIterator[bytes]
+ _AsyncIterable = AsyncIterable[bytes]
+else:
+ from collections.abc import AsyncIterable, AsyncIterator
+
+ _AsyncIterator = AsyncIterator
+ _AsyncIterable = AsyncIterable
+
+
+class AsyncIterablePayload(Payload):
+
+ _iter: Optional[_AsyncIterator] = None
+ _value: _AsyncIterable
+ _cached_chunks: Optional[List[bytes]] = None
+ # _consumed stays False to allow reuse with cached content
+ _autoclose = True # Iterator doesn't need explicit closing
+
+ def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
+ if not isinstance(value, AsyncIterable):
+ raise TypeError(
+ "value argument must support "
+ "collections.abc.AsyncIterable interface, "
+ "got {!r}".format(type(value))
+ )
+
+ if "content_type" not in kwargs:
+ kwargs["content_type"] = "application/octet-stream"
+
+ super().__init__(value, *args, **kwargs)
+
+ self._iter = value.__aiter__()
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ """
+ Write the entire async iterable payload to the writer stream.
+
+ Args:
+ writer: An AbstractStreamWriter instance that handles the actual writing
+
+ This method iterates through the async iterable and writes each chunk
+ to the writer without any length constraint.
+
+ Note:
+ For new implementations that need length control, use write_with_length() directly.
+ This method is maintained for backwards compatibility with existing code.
+
+ """
+ await self.write_with_length(writer, None)
+
+ async def write_with_length(
+ self, writer: AbstractStreamWriter, content_length: Optional[int]
+ ) -> None:
+ """
+ Write async iterable payload with a specific content length constraint.
+
+ Args:
+ writer: An AbstractStreamWriter instance that handles the actual writing
+ content_length: Maximum number of bytes to write (None for unlimited)
+
+ This implementation handles streaming of async iterable content with length constraints:
+
+ 1. If cached chunks are available, writes from them
+ 2. Otherwise iterates through the async iterable one chunk at a time
+ 3. Respects content_length constraints when specified
+ 4. Does NOT generate cache - that's done by as_bytes()
+
+ """
+ # If we have cached chunks, use them
+ if self._cached_chunks is not None:
+ remaining_bytes = content_length
+ for chunk in self._cached_chunks:
+ if remaining_bytes is None:
+ await writer.write(chunk)
+ elif remaining_bytes > 0:
+ await writer.write(chunk[:remaining_bytes])
+ remaining_bytes -= len(chunk)
+ else:
+ break
+ return
+
+ # If iterator is exhausted and we don't have cached chunks, nothing to write
+ if self._iter is None:
+ return
+
+ # Stream from the iterator
+ remaining_bytes = content_length
+
+ try:
+ while True:
+ if sys.version_info >= (3, 10):
+ chunk = await anext(self._iter)
+ else:
+ chunk = await self._iter.__anext__()
+ if remaining_bytes is None:
+ await writer.write(chunk)
+ # If we have a content length limit
+ elif remaining_bytes > 0:
+ await writer.write(chunk[:remaining_bytes])
+ remaining_bytes -= len(chunk)
+ # We still want to exhaust the iterator even
+ # if we have reached the content length limit
+ # since the file handle may not get closed by
+ # the iterator if we don't do this
+ except StopAsyncIteration:
+ # Iterator is exhausted
+ self._iter = None
+ self._consumed = True # Mark as consumed when streamed without caching
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ """Decode the payload content as a string if cached chunks are available."""
+ if self._cached_chunks is not None:
+ return b"".join(self._cached_chunks).decode(encoding, errors)
+ raise TypeError("Unable to decode - content not cached. Call as_bytes() first.")
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire async iterable content and returns it as bytes.
+ It generates and caches the chunks for future reuse.
+ """
+ # If we have cached chunks, return them joined
+ if self._cached_chunks is not None:
+ return b"".join(self._cached_chunks)
+
+ # If iterator is exhausted and no cache, return empty
+ if self._iter is None:
+ return b""
+
+ # Read all chunks and cache them
+ chunks: List[bytes] = []
+ async for chunk in self._iter:
+ chunks.append(chunk)
+
+ # Iterator is exhausted, cache the chunks
+ self._iter = None
+ self._cached_chunks = chunks
+ # Keep _consumed as False to allow reuse with cached chunks
+
+ return b"".join(chunks)
+
+
+class StreamReaderPayload(AsyncIterablePayload):
+ def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value.iter_any(), *args, **kwargs)
+
+
+PAYLOAD_REGISTRY = PayloadRegistry()
+PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
+PAYLOAD_REGISTRY.register(StringPayload, str)
+PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
+PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
+PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
+PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
+PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
+PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
+# try_last for giving a chance to more specialized async interables like
+# multipart.BodyPartReaderPayload override the default
+PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/payload_streamer.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/payload_streamer.py"
new file mode 100644
index 0000000..831fdc0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/payload_streamer.py"
@@ -0,0 +1,78 @@
+"""
+Payload implementation for coroutines as data provider.
+
+As a simple case, you can upload data from file::
+
+ @aiohttp.streamer
+ async def file_sender(writer, file_name=None):
+ with open(file_name, 'rb') as f:
+ chunk = f.read(2**16)
+ while chunk:
+ await writer.write(chunk)
+
+ chunk = f.read(2**16)
+
+Then you can use `file_sender` like this:
+
+ async with session.post('http://httpbin.org/post',
+ data=file_sender(file_name='huge_file')) as resp:
+ print(await resp.text())
+
+..note:: Coroutine must accept `writer` as first argument
+
+"""
+
+import types
+import warnings
+from typing import Any, Awaitable, Callable, Dict, Tuple
+
+from .abc import AbstractStreamWriter
+from .payload import Payload, payload_type
+
+__all__ = ("streamer",)
+
+
+class _stream_wrapper:
+ def __init__(
+ self,
+ coro: Callable[..., Awaitable[None]],
+ args: Tuple[Any, ...],
+ kwargs: Dict[str, Any],
+ ) -> None:
+ self.coro = types.coroutine(coro)
+ self.args = args
+ self.kwargs = kwargs
+
+ async def __call__(self, writer: AbstractStreamWriter) -> None:
+ await self.coro(writer, *self.args, **self.kwargs)
+
+
+class streamer:
+ def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
+ warnings.warn(
+ "@streamer is deprecated, use async generators instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.coro = coro
+
+ def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
+ return _stream_wrapper(self.coro, args, kwargs)
+
+
+@payload_type(_stream_wrapper)
+class StreamWrapperPayload(Payload):
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ await self._value(writer)
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ raise TypeError("Unable to decode.")
+
+
+@payload_type(streamer)
+class StreamPayload(StreamWrapperPayload):
+ def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value(), *args, **kwargs)
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ await self._value(writer)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/py.typed"
new file mode 100644
index 0000000..f5642f7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/py.typed"
@@ -0,0 +1 @@
+Marker
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/pytest_plugin.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/pytest_plugin.py"
new file mode 100644
index 0000000..7d59fe8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/pytest_plugin.py"
@@ -0,0 +1,444 @@
+import asyncio
+import contextlib
+import inspect
+import warnings
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Dict,
+ Iterator,
+ Optional,
+ Protocol,
+ Union,
+ overload,
+)
+
+import pytest
+
+from .test_utils import (
+ BaseTestServer,
+ RawTestServer,
+ TestClient,
+ TestServer,
+ loop_context,
+ setup_test_loop,
+ teardown_test_loop,
+ unused_port as _unused_port,
+)
+from .web import Application, BaseRequest, Request
+from .web_protocol import _RequestHandler
+
+try:
+ import uvloop
+except ImportError: # pragma: no cover
+ uvloop = None # type: ignore[assignment]
+
+
+class AiohttpClient(Protocol):
+ @overload
+ async def __call__(
+ self,
+ __param: Application,
+ *,
+ server_kwargs: Optional[Dict[str, Any]] = None,
+ **kwargs: Any,
+ ) -> TestClient[Request, Application]: ...
+ @overload
+ async def __call__(
+ self,
+ __param: BaseTestServer,
+ *,
+ server_kwargs: Optional[Dict[str, Any]] = None,
+ **kwargs: Any,
+ ) -> TestClient[BaseRequest, None]: ...
+
+
+class AiohttpServer(Protocol):
+ def __call__(
+ self, app: Application, *, port: Optional[int] = None, **kwargs: Any
+ ) -> Awaitable[TestServer]: ...
+
+
+class AiohttpRawServer(Protocol):
+ def __call__(
+ self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
+ ) -> Awaitable[RawTestServer]: ...
+
+
+def pytest_addoption(parser): # type: ignore[no-untyped-def]
+ parser.addoption(
+ "--aiohttp-fast",
+ action="store_true",
+ default=False,
+ help="run tests faster by disabling extra checks",
+ )
+ parser.addoption(
+ "--aiohttp-loop",
+ action="store",
+ default="pyloop",
+ help="run tests with specific loop: pyloop, uvloop or all",
+ )
+ parser.addoption(
+ "--aiohttp-enable-loop-debug",
+ action="store_true",
+ default=False,
+ help="enable event loop debug mode",
+ )
+
+
+def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
+ """Set up pytest fixture.
+
+ Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
+ """
+ func = fixturedef.func
+
+ if inspect.isasyncgenfunction(func):
+ # async generator fixture
+ is_async_gen = True
+ elif inspect.iscoroutinefunction(func):
+ # regular async fixture
+ is_async_gen = False
+ else:
+ # not an async fixture, nothing to do
+ return
+
+ strip_request = False
+ if "request" not in fixturedef.argnames:
+ fixturedef.argnames += ("request",)
+ strip_request = True
+
+ def wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
+ request = kwargs["request"]
+ if strip_request:
+ del kwargs["request"]
+
+ # if neither the fixture nor the test use the 'loop' fixture,
+ # 'getfixturevalue' will fail because the test is not parameterized
+ # (this can be removed someday if 'loop' is no longer parameterized)
+ if "loop" not in request.fixturenames:
+ raise Exception(
+ "Asynchronous fixtures must depend on the 'loop' fixture or "
+ "be used in tests depending from it."
+ )
+
+ _loop = request.getfixturevalue("loop")
+
+ if is_async_gen:
+ # for async generators, we need to advance the generator once,
+ # then advance it again in a finalizer
+ gen = func(*args, **kwargs)
+
+ def finalizer(): # type: ignore[no-untyped-def]
+ try:
+ return _loop.run_until_complete(gen.__anext__())
+ except StopAsyncIteration:
+ pass
+
+ request.addfinalizer(finalizer)
+ return _loop.run_until_complete(gen.__anext__())
+ else:
+ return _loop.run_until_complete(func(*args, **kwargs))
+
+ fixturedef.func = wrapper
+
+
+@pytest.fixture
+def fast(request): # type: ignore[no-untyped-def]
+ """--fast config option"""
+ return request.config.getoption("--aiohttp-fast")
+
+
+@pytest.fixture
+def loop_debug(request): # type: ignore[no-untyped-def]
+ """--enable-loop-debug config option"""
+ return request.config.getoption("--aiohttp-enable-loop-debug")
+
+
+@contextlib.contextmanager
+def _runtime_warning_context(): # type: ignore[no-untyped-def]
+ """Context manager which checks for RuntimeWarnings.
+
+ This exists specifically to
+ avoid "coroutine 'X' was never awaited" warnings being missed.
+
+ If RuntimeWarnings occur in the context a RuntimeError is raised.
+ """
+ with warnings.catch_warnings(record=True) as _warnings:
+ yield
+ rw = [
+ "{w.filename}:{w.lineno}:{w.message}".format(w=w)
+ for w in _warnings
+ if w.category == RuntimeWarning
+ ]
+ if rw:
+ raise RuntimeError(
+ "{} Runtime Warning{},\n{}".format(
+ len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
+ )
+ )
+
+
+@contextlib.contextmanager
+def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
+ """Passthrough loop context.
+
+ Sets up and tears down a loop unless one is passed in via the loop
+ argument when it's passed straight through.
+ """
+ if loop:
+ # loop already exists, pass it straight through
+ yield loop
+ else:
+ # this shadows loop_context's standard behavior
+ loop = setup_test_loop()
+ yield loop
+ teardown_test_loop(loop, fast=fast)
+
+
+def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
+ """Fix pytest collecting for coroutines."""
+ if collector.funcnamefilter(name) and inspect.iscoroutinefunction(obj):
+ return list(collector._genfunctions(name, obj))
+
+
+def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
+ """Run coroutines in an event loop instead of a normal function call."""
+ fast = pyfuncitem.config.getoption("--aiohttp-fast")
+ if inspect.iscoroutinefunction(pyfuncitem.function):
+ existing_loop = (
+ pyfuncitem.funcargs.get("proactor_loop")
+ or pyfuncitem.funcargs.get("selector_loop")
+ or pyfuncitem.funcargs.get("uvloop_loop")
+ or pyfuncitem.funcargs.get("loop", None)
+ )
+
+ with _runtime_warning_context():
+ with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
+ testargs = {
+ arg: pyfuncitem.funcargs[arg]
+ for arg in pyfuncitem._fixtureinfo.argnames
+ }
+ _loop.run_until_complete(pyfuncitem.obj(**testargs))
+
+ return True
+
+
+def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
+ if "loop_factory" not in metafunc.fixturenames:
+ return
+
+ loops = metafunc.config.option.aiohttp_loop
+ avail_factories: dict[str, Callable[[], asyncio.AbstractEventLoop]]
+ avail_factories = {"pyloop": asyncio.new_event_loop}
+
+ if uvloop is not None: # pragma: no cover
+ avail_factories["uvloop"] = uvloop.new_event_loop
+
+ if loops == "all":
+ loops = "pyloop,uvloop?"
+
+ factories = {} # type: ignore[var-annotated]
+ for name in loops.split(","):
+ required = not name.endswith("?")
+ name = name.strip(" ?")
+ if name not in avail_factories: # pragma: no cover
+ if required:
+ raise ValueError(
+ "Unknown loop '%s', available loops: %s"
+ % (name, list(factories.keys()))
+ )
+ else:
+ continue
+ factories[name] = avail_factories[name]
+ metafunc.parametrize(
+ "loop_factory", list(factories.values()), ids=list(factories.keys())
+ )
+
+
+@pytest.fixture
+def loop(
+ loop_factory: Callable[[], asyncio.AbstractEventLoop],
+ fast: bool,
+ loop_debug: bool,
+) -> Iterator[asyncio.AbstractEventLoop]:
+ """Return an instance of the event loop."""
+ with loop_context(loop_factory, fast=fast) as _loop:
+ if loop_debug:
+ _loop.set_debug(True) # pragma: no cover
+ asyncio.set_event_loop(_loop)
+ yield _loop
+
+
+@pytest.fixture
+def proactor_loop() -> Iterator[asyncio.AbstractEventLoop]:
+ factory = asyncio.ProactorEventLoop # type: ignore[attr-defined]
+
+ with loop_context(factory) as _loop:
+ asyncio.set_event_loop(_loop)
+ yield _loop
+
+
+@pytest.fixture
+def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
+ warnings.warn(
+ "Deprecated, use aiohttp_unused_port fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_unused_port
+
+
+@pytest.fixture
+def aiohttp_unused_port() -> Callable[[], int]:
+ """Return a port that is unused on the current host."""
+ return _unused_port
+
+
+@pytest.fixture
+def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
+ """Factory to create a TestServer instance, given an app.
+
+ aiohttp_server(app, **kwargs)
+ """
+ servers = []
+
+ async def go(
+ app: Application,
+ *,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ **kwargs: Any,
+ ) -> TestServer:
+ server = TestServer(app, host=host, port=port)
+ await server.start_server(loop=loop, **kwargs)
+ servers.append(server)
+ return server
+
+ yield go
+
+ async def finalize() -> None:
+ while servers:
+ await servers.pop().close()
+
+ loop.run_until_complete(finalize())
+
+
+@pytest.fixture
+def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover
+ warnings.warn(
+ "Deprecated, use aiohttp_server fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_server
+
+
+@pytest.fixture
+def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
+ """Factory to create a RawTestServer instance, given a web handler.
+
+ aiohttp_raw_server(handler, **kwargs)
+ """
+ servers = []
+
+ async def go(
+ handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
+ ) -> RawTestServer:
+ server = RawTestServer(handler, port=port)
+ await server.start_server(loop=loop, **kwargs)
+ servers.append(server)
+ return server
+
+ yield go
+
+ async def finalize() -> None:
+ while servers:
+ await servers.pop().close()
+
+ loop.run_until_complete(finalize())
+
+
+@pytest.fixture
+def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover
+ aiohttp_raw_server,
+):
+ warnings.warn(
+ "Deprecated, use aiohttp_raw_server fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_raw_server
+
+
+@pytest.fixture
+def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]:
+ """Factory to create a TestClient instance.
+
+ aiohttp_client(app, **kwargs)
+ aiohttp_client(server, **kwargs)
+ aiohttp_client(raw_server, **kwargs)
+ """
+ clients = []
+
+ @overload
+ async def go(
+ __param: Application,
+ *,
+ server_kwargs: Optional[Dict[str, Any]] = None,
+ **kwargs: Any,
+ ) -> TestClient[Request, Application]: ...
+
+ @overload
+ async def go(
+ __param: BaseTestServer,
+ *,
+ server_kwargs: Optional[Dict[str, Any]] = None,
+ **kwargs: Any,
+ ) -> TestClient[BaseRequest, None]: ...
+
+ async def go(
+ __param: Union[Application, BaseTestServer],
+ *args: Any,
+ server_kwargs: Optional[Dict[str, Any]] = None,
+ **kwargs: Any,
+ ) -> TestClient[Any, Any]:
+ if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type]
+ __param, (Application, BaseTestServer)
+ ):
+ __param = __param(loop, *args, **kwargs)
+ kwargs = {}
+ else:
+ assert not args, "args should be empty"
+
+ if isinstance(__param, Application):
+ server_kwargs = server_kwargs or {}
+ server = TestServer(__param, loop=loop, **server_kwargs)
+ client = TestClient(server, loop=loop, **kwargs)
+ elif isinstance(__param, BaseTestServer):
+ client = TestClient(__param, loop=loop, **kwargs)
+ else:
+ raise ValueError("Unknown argument type: %r" % type(__param))
+
+ await client.start_server()
+ clients.append(client)
+ return client
+
+ yield go
+
+ async def finalize() -> None:
+ while clients:
+ await clients.pop().close()
+
+ loop.run_until_complete(finalize())
+
+
+@pytest.fixture
+def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover
+ warnings.warn(
+ "Deprecated, use aiohttp_client fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_client
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/resolver.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/resolver.py"
new file mode 100644
index 0000000..b20e567
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/resolver.py"
@@ -0,0 +1,274 @@
+import asyncio
+import socket
+import weakref
+from typing import Any, Dict, Final, List, Optional, Tuple, Type, Union
+
+from .abc import AbstractResolver, ResolveResult
+
+__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
+
+
# aiodns is an optional accelerator: when it is importable *and* exposes
# getaddrinfo(), the aiodns-backed AsyncResolver becomes the default.
try:
    import aiodns

    # Older aiodns releases lack DNSResolver.getaddrinfo; in that case we
    # keep aiodns importable but do not make it the default resolver.
    aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo")
except ImportError:  # pragma: no cover
    aiodns = None  # type: ignore[assignment]
    aiodns_default = False


# Flag sets for the numeric (non-resolving) socket APIs: resolver results are
# already numeric host/port strings, so re-resolution must be suppressed.
_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV
_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
_AI_ADDRCONFIG = socket.AI_ADDRCONFIG
if hasattr(socket, "AI_MASK"):
    # Some platforms reject getaddrinfo flags outside AI_MASK; clamp to it.
    _AI_ADDRCONFIG &= socket.AI_MASK
+
+
class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        # When no loop is given, construction must happen inside a running
        # event loop (get_running_loop raises otherwise).
        self._loop = loop or asyncio.get_running_loop()

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host*/*port* via ``loop.getaddrinfo`` (executor-backed).

        Returns one ResolveResult per usable address.  OSError from the
        underlying getaddrinfo call propagates to the caller.
        """
        infos = await self._loop.getaddrinfo(
            host,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=_AI_ADDRCONFIG,
        )

        hosts: List[ResolveResult] = []
        # Each entry is the standard 5-tuple: (family, type, proto, canonname,
        # sockaddr); only family/proto/sockaddr are used below.
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by Python build,
                    # or IPv6 is not enabled in the host
                    continue
                if address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    resolved_host, _port = await self._loop.getnameinfo(
                        address, _NAME_SOCKET_FLAGS
                    )
                    port = int(_port)
                else:
                    resolved_host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host, port = address  # type: ignore[misc]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=proto,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        return hosts

    async def close(self) -> None:
        # Nothing to release: the event loop owns the executor.
        pass
+
+
class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups.

    With no custom arguments, a per-event-loop aiodns resolver is shared via
    _DNSResolverManager; with custom args/kwargs a dedicated
    aiodns.DNSResolver instance is created for this object alone.
    """

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._loop = loop or asyncio.get_running_loop()
        self._manager: Optional[_DNSResolverManager] = None
        # If custom args are provided, create a dedicated resolver instance
        # This means each AsyncResolver with custom args gets its own
        # aiodns.DNSResolver instance
        if args or kwargs:
            self._resolver = aiodns.DNSResolver(*args, **kwargs)
            return
        # Use the shared resolver from the manager for default arguments
        self._manager = _DNSResolverManager()
        self._resolver = self._manager.get_resolver(self, self._loop)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host*/*port* asynchronously via aiodns getaddrinfo().

        Raises OSError (wrapping the aiodns DNSError) on failure or when no
        usable address is returned.
        """
        try:
            resp = await self._resolver.getaddrinfo(
                host,
                port=port,
                type=socket.SOCK_STREAM,
                family=family,
                flags=_AI_ADDRCONFIG,
            )
        except aiodns.error.DNSError as exc:
            # DNSError args are conventionally (errno, message).  Guard on
            # >= 2 before touching args[1]: the previous ">= 1" check let a
            # single-argument DNSError raise IndexError here, masking the
            # real DNS failure.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc
        hosts: List[ResolveResult] = []
        for node in resp.nodes:
            address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr
            family = node.family
            if family == socket.AF_INET6:
                if len(address) > 3 and address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    result = await self._resolver.getnameinfo(
                        (address[0].decode("ascii"), *address[1:]),
                        _NAME_SOCKET_FLAGS,
                    )
                    resolved_host = result.node
                else:
                    resolved_host = address[0].decode("ascii")
                    port = address[1]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host = address[0].decode("ascii")
                port = address[1]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=0,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Legacy fallback using DNSResolver.query for old aiodns versions."""
        qtype: Final = "AAAA" if family == socket.AF_INET6 else "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # Same args guard as in resolve(): only read args[1] when present.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def close(self) -> None:
        """Release or cancel the underlying aiodns resolver (idempotent)."""
        if self._manager:
            # Release the resolver from the manager if using the shared resolver
            self._manager.release_resolver(self, self._loop)
            self._manager = None  # Clear reference to manager
            self._resolver = None  # type: ignore[assignment] # Clear reference to resolver
            return
        # Otherwise cancel our dedicated resolver
        if self._resolver is not None:
            self._resolver.cancel()
            self._resolver = None  # type: ignore[assignment] # Clear reference
+
+
class _DNSResolverManager:
    """Manager for aiodns.DNSResolver objects.

    This class manages shared aiodns.DNSResolver instances
    with no custom arguments across different event loops.
    """

    # Process-wide singleton instance (created lazily by __new__).
    _instance: Optional["_DNSResolverManager"] = None

    def __new__(cls) -> "_DNSResolverManager":
        # Classic singleton: the first construction initializes shared state,
        # every later call returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance._init()
        return cls._instance

    def _init(self) -> None:
        # Use WeakKeyDictionary to allow event loops to be garbage collected
        # Maps loop -> (shared resolver, weak set of AsyncResolver clients).
        self._loop_data: weakref.WeakKeyDictionary[
            asyncio.AbstractEventLoop,
            tuple["aiodns.DNSResolver", weakref.WeakSet["AsyncResolver"]],
        ] = weakref.WeakKeyDictionary()

    def get_resolver(
        self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop
    ) -> "aiodns.DNSResolver":
        """Get or create the shared aiodns.DNSResolver instance for a specific event loop.

        Args:
            client: The AsyncResolver instance requesting the resolver.
                This is required to track resolver usage.
            loop: The event loop to use for the resolver.
        """
        # Create a new resolver and client set for this loop if it doesn't exist
        if loop not in self._loop_data:
            resolver = aiodns.DNSResolver(loop=loop)
            client_set: weakref.WeakSet["AsyncResolver"] = weakref.WeakSet()
            self._loop_data[loop] = (resolver, client_set)
        else:
            # Get the existing resolver and client set
            resolver, client_set = self._loop_data[loop]

        # Register this client with the loop
        client_set.add(client)
        return resolver

    def release_resolver(
        self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop
    ) -> None:
        """Release the resolver for an AsyncResolver client when it's closed.

        Args:
            client: The AsyncResolver instance to release.
            loop: The event loop the resolver was using.
        """
        # Remove client from its loop's tracking
        current_loop_data = self._loop_data.get(loop)
        if current_loop_data is None:
            # Nothing registered for this loop (e.g. double close) — no-op.
            return
        resolver, client_set = current_loop_data
        client_set.discard(client)
        # If no more clients for this loop, cancel and remove its resolver
        if not client_set:
            if resolver is not None:
                resolver.cancel()
            del self._loop_data[loop]
+
+
# Prefer the aiodns-backed resolver when a capable aiodns is installed;
# otherwise fall back to the executor-based threaded resolver.
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/streams.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/streams.py"
new file mode 100644
index 0000000..6cc74fc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/streams.py"
@@ -0,0 +1,758 @@
+import asyncio
+import collections
+import warnings
+from typing import (
+ Awaitable,
+ Callable,
+ Deque,
+ Final,
+ Generic,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+)
+
+from .base_protocol import BaseProtocol
+from .helpers import (
+ _EXC_SENTINEL,
+ BaseTimerContext,
+ TimerNoop,
+ set_exception,
+ set_result,
+)
+from .log import internal_logger
+
+__all__ = (
+ "EMPTY_PAYLOAD",
+ "EofStream",
+ "StreamReader",
+ "DataQueue",
+)
+
+_T = TypeVar("_T")
+
+
class EofStream(Exception):
    """eof stream indication.

    Raised by DataQueue.read() (and legacy stream consumers) when the stream
    reached EOF and no buffered data remains.
    """
+
+
class AsyncStreamIterator(Generic[_T]):
    """Async iterator driven by repeatedly awaiting *read_func*.

    Iteration terminates when *read_func* raises EofStream or yields the
    empty bytes object ``b""``.
    """

    __slots__ = ("read_func",)

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            item = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if item == b"":
            # An empty chunk is the EOF sentinel for byte streams.
            raise StopAsyncIteration
        return item
+
+
class ChunkTupleAsyncStreamIterator:
    """Async iterator over ``StreamReader.readchunk()`` results.

    Yields ``(data, end_of_http_chunk)`` pairs and stops on the terminal
    ``(b"", False)`` pair that readchunk() emits at EOF.
    """

    __slots__ = ("_stream",)

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        pair = await self._stream.readchunk()
        if pair == (b"", False):
            raise StopAsyncIteration
        return pair
+
+
class AsyncStreamReaderMixin:
    """Mixin that equips a stream reader with ``async for`` helpers."""

    __slots__ = ()

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        """Iterate line by line via readline()."""
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n."""

        def read_chunk() -> Awaitable[bytes]:
            return self.read(n)  # type: ignore[attr-defined]

        return AsyncStreamIterator(read_chunk)

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
+
+
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    __slots__ = (
        "_protocol",
        "_low_water",
        "_high_water",
        "_low_water_chunks",
        "_high_water_chunks",
        "_loop",
        "_size",
        "_cursor",
        "_http_chunk_splits",
        "_buffer",
        "_buffer_offset",
        "_eof",
        "_waiter",
        "_eof_waiter",
        "_exception",
        "_timer",
        "_eof_callbacks",
        "_eof_counter",
        "total_bytes",
        "total_compressed_bytes",
    )

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        # *limit* is the flow-control low-water mark in bytes; reading is
        # paused once buffered size exceeds twice that (the high-water mark).
        self._protocol = protocol
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        # Ensure high_water_chunks >= 3 so it's always > low_water_chunks.
        self._high_water_chunks = max(3, limit // 4)
        # Use max(2, ...) because there's always at least 1 chunk split remaining
        # (the current position), so we need low_water >= 2 to allow resume.
        self._low_water_chunks = max(2, self._high_water_chunks // 2)
        self._loop = loop
        # _size: bytes currently buffered; _cursor: bytes already consumed.
        self._size = 0
        self._cursor = 0
        # Logical offsets of HTTP chunk boundaries; None unless chunked mode
        # was enabled via begin_http_chunk_receiving().
        self._http_chunk_splits: Optional[Deque[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        # Length of the already-consumed prefix of self._buffer[0].
        self._buffer_offset = 0
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = TimerNoop() if timer is None else timer
        self._eof_callbacks: List[Callable[[], None]] = []
        self._eof_counter = 0
        self.total_bytes = 0
        self.total_compressed_bytes: Optional[int] = None

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        """Return the (low-water, high-water) flow-control limits in bytes."""
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        """Return the exception set via set_exception(), if any."""
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Fail the stream: pending and future reads will raise *exc*."""
        self._exception = exc
        # EOF callbacks must not fire on a failed stream.
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc, exc_cause)

    def on_eof(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run at EOF (immediately if already at EOF)."""
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        """Mark the stream as ended; wake readers and run EOF callbacks."""
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        # No more data will arrive, so the transport may resume (the
        # connection may still be used for something else, e.g. keep-alive).
        if self._protocol._reading_paused:
            self._protocol.resume_reading()

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        """Block until feed_eof() is called (returns at once if already EOF)."""
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    @property
    def total_raw_bytes(self) -> int:
        # Bytes received on the wire: compressed size when the payload was
        # compressed, otherwise the decoded byte count.
        if self.total_compressed_bytes is None:
            return self.total_bytes
        return self.total_compressed_bytes

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        # Normalize the head chunk first so the pushed-back data becomes the
        # new, fully-unconsumed head.
        if self._buffer_offset:
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        """Append *data* to the buffer, wake a waiting reader, apply backpressure."""
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        data_len = len(data)
        self._size += data_len
        self._buffer.append(data)
        self.total_bytes += data_len

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        # Pause the transport once the buffer crosses the high-water mark.
        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        """Switch the reader into HTTP chunked mode (must happen before data)."""
        if self._http_chunk_splits is None:
            if self.total_bytes:
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when some data was already fed"
                )
            self._http_chunk_splits = collections.deque()

    def end_http_chunk_receiving(self) -> None:
        """Record the end of the current HTTP chunk and wake readchunk()."""
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_chunk_receiving without calling "
                "begin_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # If we get too many small chunks before self._high_water is reached, then any
        # .read() call becomes computationally expensive, and could block the event loop
        # for too long, hence an additional self._high_water_chunks here.
        if (
            len(self._http_chunk_splits) > self._high_water_chunks
            and not self._protocol._reading_paused
        ):
            self._protocol.pause_reading()

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        """Suspend the (single) reader until feed_data/feed_eof wakes it."""
        if not self._protocol.connected:
            raise RuntimeError("Connection closed.")

        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            with self._timer:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        """Read one line (up to and including b"\\n")."""
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        """Read until *separator* (inclusive) or EOF; ValueError if the
        accumulated data exceeds the high-water limit."""
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                # +1 so that ichar == 0 means "not found" (find returns -1).
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(
                    ichar - offset + seplen - 1 if ichar else -1
                )
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        """Read up to *n* bytes (everything until EOF when n < 0)."""
        if self._exception is not None:
            raise self._exception

        # migration problem; with DataQueue you have to catch
        # EofStream exception, so common way is to run payload.read() inside
        # infinite loop. what can cause real infinite loop with StreamReader
        # lets keep this code one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        """Read whatever is buffered, waiting for at least one byte or EOF."""
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of a HTTP chunk , otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.popleft()
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        """Read exactly *n* bytes; raise IncompleteReadError on early EOF."""
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        """Synchronously read up to *n* buffered bytes (no waiting)."""
        # default was changed to be consistent with .read(-1)
        #
        # I believe the most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        """Consume up to *n* bytes from the head buffer chunk (all if n == -1)."""
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            # Partial consumption: advance the offset instead of slicing the
            # deque head away.
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        data_len = len(data)
        self._size -= data_len
        self._cursor += data_len

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.popleft()

        # Resume the transport once both the byte count and the chunk-split
        # count have drained below their low-water marks.
        if (
            self._protocol._reading_paused
            and self._size < self._low_water
            and (
                self._http_chunk_splits is None
                or len(self._http_chunk_splits) < self._low_water_chunks
            )
        ):
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        self._timer.assert_timeout()

        chunks = []
        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
+
+
class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    """Always-empty stream used where a payload has no body (EMPTY_PAYLOAD).

    Deliberately does NOT call StreamReader.__init__: it needs no protocol,
    loop or buffer, and overrides every method that would touch them.
    """

    __slots__ = ("_read_eof_chunk",)

    def __init__(self) -> None:
        # Tracks whether readchunk() already emitted its first (EOF) pair.
        self._read_eof_chunk = False
        self.total_bytes = 0

    def __repr__(self) -> str:
        return "<%s>" % self.__class__.__name__

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # Exceptions are ignored: an empty stream can never fail.
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        # Already at EOF, so the callback runs immediately.
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        # First call yields the EOF sentinel (b"", False); subsequent calls
        # report the chunk boundary as already passed.
        if not self._read_eof_chunk:
            self._read_eof_chunk = True
            return (b"", False)

        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""
+
+
# Shared singleton used wherever a request/response carries no body.
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
+
+
class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader.

    feed_data()/feed_eof()/set_exception() are the producer side; read() is
    the single consumer.  At most one read() may be pending at a time.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        # Future used to park the single pending read() call.
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        # Items are stored as (data, size) pairs; size is kept for the
        # flow-controlled subclass.
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        return self._eof

    def at_eof(self) -> bool:
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # Failing the queue also ends it: no further items are expected.
        self._eof = True
        self._exception = exc
        if (waiter := self._waiter) is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Enqueue one item and wake the pending reader, if any."""
        self._buffer.append((data, size))
        if (waiter := self._waiter) is not None:
            self._waiter = None
            set_result(waiter, None)

    def feed_eof(self) -> None:
        """Signal end of stream and wake the pending reader, if any."""
        self._eof = True
        if (waiter := self._waiter) is not None:
            self._waiter = None
            set_result(waiter, None)

    async def read(self) -> _T:
        """Return the next item; raise the stored exception or EofStream at end."""
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                # Drop the waiter so a later read() can park again.
                self._waiter = None
                raise
        if self._buffer:
            data, _ = self._buffer.popleft()
            return data
        if self._exception is not None:
            raise self._exception
        raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)
+
+
class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.

    This class is deprecated and will be removed in version 4.0.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)
        # Total declared size of buffered items (from feed_data's size arg).
        self._size = 0
        self._protocol = protocol
        # Pause threshold: twice the supplied limit, mirroring StreamReader's
        # high-water mark convention.
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Enqueue one item, then pause the protocol above the size limit."""
        super().feed_data(data, size)
        self._size += size

        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        """Dequeue the next item, resuming the protocol once drained enough."""
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise
        if self._buffer:
            data, size = self._buffer.popleft()
            self._size -= size
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
            return data
        if self._exception is not None:
            raise self._exception
        raise EofStream
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/tcp_helpers.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/tcp_helpers.py"
new file mode 100644
index 0000000..88b2442
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/tcp_helpers.py"
@@ -0,0 +1,37 @@
+"""Helper methods to tune a TCP connection"""
+
+import asyncio
+import socket
+from contextlib import suppress
+from typing import Optional # noqa
+
+__all__ = ("tcp_keepalive", "tcp_nodelay")
+
+
if hasattr(socket, "SO_KEEPALIVE"):

    def tcp_keepalive(transport: asyncio.Transport) -> None:
        """Enable TCP keep-alive probes on the transport's socket, if any."""
        sock = transport.get_extra_info("socket")
        if sock is not None:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

else:

    def tcp_keepalive(transport: asyncio.Transport) -> None:  # pragma: no cover
        """No-op fallback for platforms without SO_KEEPALIVE."""
        pass
+
+
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
    """Set or clear TCP_NODELAY on the transport's underlying socket.

    Silently does nothing when the transport exposes no socket or the
    socket is not an IPv4/IPv6 one.
    """
    sock = transport.get_extra_info("socket")
    if sock is None or sock.family not in (socket.AF_INET, socket.AF_INET6):
        return

    # socket may be closed already, on windows OSError get raised
    with suppress(OSError):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, bool(value))
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/test_utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/test_utils.py"
new file mode 100644
index 0000000..87c3142
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/test_utils.py"
@@ -0,0 +1,774 @@
+"""Utilities shared by tests."""
+
+import asyncio
+import contextlib
+import gc
+import inspect
+import ipaddress
+import os
+import socket
+import sys
+import warnings
+from abc import ABC, abstractmethod
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Generic,
+ Iterator,
+ List,
+ Optional,
+ Type,
+ TypeVar,
+ cast,
+ overload,
+)
+from unittest import IsolatedAsyncioTestCase, mock
+
+from aiosignal import Signal
+from multidict import CIMultiDict, CIMultiDictProxy
+from yarl import URL
+
+import aiohttp
+from aiohttp.client import (
+ _RequestContextManager,
+ _RequestOptions,
+ _WSRequestContextManager,
+)
+
+from . import ClientSession, hdrs
+from .abc import AbstractCookieJar
+from .client_reqrep import ClientResponse
+from .client_ws import ClientWebSocketResponse
+from .helpers import sentinel
+from .http import HttpVersion, RawRequestMessage
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .typedefs import StrOrURL
+from .web import (
+ Application,
+ AppRunner,
+ BaseRequest,
+ BaseRunner,
+ Request,
+ Server,
+ ServerRunner,
+ SockSite,
+ UrlMappingMatchInfo,
+)
+from .web_protocol import _RequestHandler
+
+if TYPE_CHECKING:
+ from ssl import SSLContext
+else:
+ SSLContext = None
+
+if sys.version_info >= (3, 11) and TYPE_CHECKING:
+ from typing import Unpack
+
+if sys.version_info >= (3, 11):
+ from typing import Self
+else:
+ Self = Any
+
+_ApplicationNone = TypeVar("_ApplicationNone", Application, None)
+_Request = TypeVar("_Request", bound=BaseRequest)
+
+REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
+
+
+def get_unused_port_socket(
+ host: str, family: socket.AddressFamily = socket.AF_INET
+) -> socket.socket:
+ return get_port_socket(host, 0, family)
+
+
+def get_port_socket(
+ host: str, port: int, family: socket.AddressFamily
+) -> socket.socket:
+ s = socket.socket(family, socket.SOCK_STREAM)
+ if REUSE_ADDRESS:
+ # Windows has different semantics for SO_REUSEADDR,
+ # so don't set it. Ref:
+ # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ s.bind((host, port))
+ return s
+
+
+def unused_port() -> int:
+ """Return a port that is unused on the current host."""
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.bind(("127.0.0.1", 0))
+ return cast(int, s.getsockname()[1])
+
+
+class BaseTestServer(ABC):
+ __test__ = False
+
+ def __init__(
+ self,
+ *,
+ scheme: str = "",
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ skip_url_asserts: bool = False,
+ socket_factory: Callable[
+ [str, int, socket.AddressFamily], socket.socket
+ ] = get_port_socket,
+ **kwargs: Any,
+ ) -> None:
+ self._loop = loop
+ self.runner: Optional[BaseRunner] = None
+ self._root: Optional[URL] = None
+ self.host = host
+ self.port = port
+ self._closed = False
+ self.scheme = scheme
+ self.skip_url_asserts = skip_url_asserts
+ self.socket_factory = socket_factory
+
+ async def start_server(
+ self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
+ ) -> None:
+ if self.runner:
+ return
+ self._loop = loop
+ self._ssl = kwargs.pop("ssl", None)
+ self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
+ await self.runner.setup()
+ if not self.port:
+ self.port = 0
+ absolute_host = self.host
+ try:
+ version = ipaddress.ip_address(self.host).version
+ except ValueError:
+ version = 4
+ if version == 6:
+ absolute_host = f"[{self.host}]"
+ family = socket.AF_INET6 if version == 6 else socket.AF_INET
+ _sock = self.socket_factory(self.host, self.port, family)
+ self.host, self.port = _sock.getsockname()[:2]
+ site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
+ await site.start()
+ server = site._server
+ assert server is not None
+ sockets = server.sockets # type: ignore[attr-defined]
+ assert sockets is not None
+ self.port = sockets[0].getsockname()[1]
+ if not self.scheme:
+ self.scheme = "https" if self._ssl else "http"
+ self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}")
+
+ @abstractmethod # pragma: no cover
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+ pass
+
+ def make_url(self, path: StrOrURL) -> URL:
+ assert self._root is not None
+ url = URL(path)
+ if not self.skip_url_asserts:
+ assert not url.absolute
+ return self._root.join(url)
+ else:
+ return URL(str(self._root) + str(path))
+
+ @property
+ def started(self) -> bool:
+ return self.runner is not None
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ @property
+ def handler(self) -> Server:
+ # for backward compatibility
+ # web.Server instance
+ runner = self.runner
+ assert runner is not None
+ assert runner.server is not None
+ return runner.server
+
+ async def close(self) -> None:
+ """Close all fixtures created by the test client.
+
+ After that point, the TestClient is no longer usable.
+
+ This is an idempotent function: running close multiple times
+ will not have any additional effects.
+
+ close is also run when the object is garbage collected, and on
+ exit when used as a context manager.
+
+ """
+ if self.started and not self.closed:
+ assert self.runner is not None
+ await self.runner.cleanup()
+ self._root = None
+ self.port = None
+ self._closed = True
+
+ def __enter__(self) -> None:
+ raise TypeError("Use async with instead")
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ # __exit__ should exist in pair with __enter__ but never executed
+ pass # pragma: no cover
+
+ async def __aenter__(self) -> "BaseTestServer":
+ await self.start_server(loop=self._loop)
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ await self.close()
+
+
+class TestServer(BaseTestServer):
+ def __init__(
+ self,
+ app: Application,
+ *,
+ scheme: str = "",
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ **kwargs: Any,
+ ):
+ self.app = app
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+ return AppRunner(self.app, **kwargs)
+
+
+class RawTestServer(BaseTestServer):
+ def __init__(
+ self,
+ handler: _RequestHandler,
+ *,
+ scheme: str = "",
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._handler = handler
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+ async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
+ srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
+ return ServerRunner(srv, debug=debug, **kwargs)
+
+
+class TestClient(Generic[_Request, _ApplicationNone]):
+ """
+ A test client implementation.
+
+ To write functional tests for aiohttp based servers.
+
+ """
+
+ __test__ = False
+
+ @overload
+ def __init__(
+ self: "TestClient[Request, Application]",
+ server: TestServer,
+ *,
+ cookie_jar: Optional[AbstractCookieJar] = None,
+ **kwargs: Any,
+ ) -> None: ...
+ @overload
+ def __init__(
+ self: "TestClient[_Request, None]",
+ server: BaseTestServer,
+ *,
+ cookie_jar: Optional[AbstractCookieJar] = None,
+ **kwargs: Any,
+ ) -> None: ...
+ def __init__(
+ self,
+ server: BaseTestServer,
+ *,
+ cookie_jar: Optional[AbstractCookieJar] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ **kwargs: Any,
+ ) -> None:
+ if not isinstance(server, BaseTestServer):
+ raise TypeError(
+ "server must be TestServer instance, found type: %r" % type(server)
+ )
+ self._server = server
+ self._loop = loop
+ if cookie_jar is None:
+ cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
+ self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
+ self._session._retry_connection = False
+ self._closed = False
+ self._responses: List[ClientResponse] = []
+ self._websockets: List[ClientWebSocketResponse] = []
+
+ async def start_server(self) -> None:
+ await self._server.start_server(loop=self._loop)
+
+ @property
+ def host(self) -> str:
+ return self._server.host
+
+ @property
+ def port(self) -> Optional[int]:
+ return self._server.port
+
+ @property
+ def server(self) -> BaseTestServer:
+ return self._server
+
+ @property
+ def app(self) -> _ApplicationNone:
+ return getattr(self._server, "app", None) # type: ignore[return-value]
+
+ @property
+ def session(self) -> ClientSession:
+ """An internal aiohttp.ClientSession.
+
+ Unlike the methods on the TestClient, client session requests
+ do not automatically include the host in the url queried, and
+ will require an absolute path to the resource.
+
+ """
+ return self._session
+
+ def make_url(self, path: StrOrURL) -> URL:
+ return self._server.make_url(path)
+
+ async def _request(
+ self, method: str, path: StrOrURL, **kwargs: Any
+ ) -> ClientResponse:
+ resp = await self._session.request(method, self.make_url(path), **kwargs)
+ # save it to close later
+ self._responses.append(resp)
+ return resp
+
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
+
+ def request(
+ self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions]
+ ) -> _RequestContextManager: ...
+
+ def get(
+ self,
+ path: StrOrURL,
+ **kwargs: Unpack[_RequestOptions],
+ ) -> _RequestContextManager: ...
+
+ def options(
+ self,
+ path: StrOrURL,
+ **kwargs: Unpack[_RequestOptions],
+ ) -> _RequestContextManager: ...
+
+ def head(
+ self,
+ path: StrOrURL,
+ **kwargs: Unpack[_RequestOptions],
+ ) -> _RequestContextManager: ...
+
+ def post(
+ self,
+ path: StrOrURL,
+ **kwargs: Unpack[_RequestOptions],
+ ) -> _RequestContextManager: ...
+
+ def put(
+ self,
+ path: StrOrURL,
+ **kwargs: Unpack[_RequestOptions],
+ ) -> _RequestContextManager: ...
+
+ def patch(
+ self,
+ path: StrOrURL,
+ **kwargs: Unpack[_RequestOptions],
+ ) -> _RequestContextManager: ...
+
+ def delete(
+ self,
+ path: StrOrURL,
+ **kwargs: Unpack[_RequestOptions],
+ ) -> _RequestContextManager: ...
+
+ else:
+
+ def request(
+ self, method: str, path: StrOrURL, **kwargs: Any
+ ) -> _RequestContextManager:
+ """Routes a request to tested http server.
+
+ The interface is identical to aiohttp.ClientSession.request,
+ except the loop kwarg is overridden by the instance used by the
+ test server.
+
+ """
+ return _RequestContextManager(self._request(method, path, **kwargs))
+
+ def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP GET request."""
+ return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
+
+ def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP POST request."""
+ return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
+
+ def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP OPTIONS request."""
+ return _RequestContextManager(
+ self._request(hdrs.METH_OPTIONS, path, **kwargs)
+ )
+
+ def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP HEAD request."""
+ return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
+
+ def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP PUT request."""
+ return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
+
+ def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP PATCH request."""
+ return _RequestContextManager(
+ self._request(hdrs.METH_PATCH, path, **kwargs)
+ )
+
+ def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP PATCH request."""
+ return _RequestContextManager(
+ self._request(hdrs.METH_DELETE, path, **kwargs)
+ )
+
+ def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
+ """Initiate websocket connection.
+
+ The api corresponds to aiohttp.ClientSession.ws_connect.
+
+ """
+ return _WSRequestContextManager(self._ws_connect(path, **kwargs))
+
+ async def _ws_connect(
+ self, path: StrOrURL, **kwargs: Any
+ ) -> ClientWebSocketResponse:
+ ws = await self._session.ws_connect(self.make_url(path), **kwargs)
+ self._websockets.append(ws)
+ return ws
+
+ async def close(self) -> None:
+ """Close all fixtures created by the test client.
+
+ After that point, the TestClient is no longer usable.
+
+ This is an idempotent function: running close multiple times
+ will not have any additional effects.
+
+ close is also run on exit when used as a(n) (asynchronous)
+ context manager.
+
+ """
+ if not self._closed:
+ for resp in self._responses:
+ resp.close()
+ for ws in self._websockets:
+ await ws.close()
+ await self._session.close()
+ await self._server.close()
+ self._closed = True
+
+ def __enter__(self) -> None:
+ raise TypeError("Use async with instead")
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ # __exit__ should exist in pair with __enter__ but never executed
+ pass # pragma: no cover
+
+ async def __aenter__(self) -> Self:
+ await self.start_server()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ await self.close()
+
+
+class AioHTTPTestCase(IsolatedAsyncioTestCase):
+ """A base class to allow for unittest web applications using aiohttp.
+
+ Provides the following:
+
+ * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
+ * self.loop (asyncio.BaseEventLoop): the event loop in which the
+ application and server are running.
+ * self.app (aiohttp.web.Application): the application returned by
+ self.get_application()
+
+ Note that the TestClient's methods are asynchronous: you have to
+ execute function on the test client using asynchronous methods.
+ """
+
+ async def get_application(self) -> Application:
+ """Get application.
+
+ This method should be overridden
+ to return the aiohttp.web.Application
+ object to test.
+ """
+ return self.get_app()
+
+ def get_app(self) -> Application:
+ """Obsolete method used to constructing web application.
+
+ Use .get_application() coroutine instead.
+ """
+ raise RuntimeError("Did you forget to define get_application()?")
+
+ async def asyncSetUp(self) -> None:
+ self.loop = asyncio.get_running_loop()
+ return await self.setUpAsync()
+
+ async def setUpAsync(self) -> None:
+ self.app = await self.get_application()
+ self.server = await self.get_server(self.app)
+ self.client = await self.get_client(self.server)
+
+ await self.client.start_server()
+
+ async def asyncTearDown(self) -> None:
+ return await self.tearDownAsync()
+
+ async def tearDownAsync(self) -> None:
+ await self.client.close()
+
+ async def get_server(self, app: Application) -> TestServer:
+ """Return a TestServer instance."""
+ return TestServer(app, loop=self.loop)
+
+ async def get_client(self, server: TestServer) -> TestClient[Request, Application]:
+ """Return a TestClient instance."""
+ return TestClient(server, loop=self.loop)
+
+
+def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
+ """
+ A decorator dedicated to use with asynchronous AioHTTPTestCase test methods.
+
+ In 3.8+, this does nothing.
+ """
+ warnings.warn(
+ "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return func
+
+
+_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
+
+
+@contextlib.contextmanager
+def loop_context(
+ loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
+) -> Iterator[asyncio.AbstractEventLoop]:
+ """A contextmanager that creates an event_loop, for test purposes.
+
+ Handles the creation and cleanup of a test loop.
+ """
+ loop = setup_test_loop(loop_factory)
+ yield loop
+ teardown_test_loop(loop, fast=fast)
+
+
+def setup_test_loop(
+ loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
+) -> asyncio.AbstractEventLoop:
+ """Create and return an asyncio.BaseEventLoop instance.
+
+ The caller should also call teardown_test_loop,
+ once they are done with the loop.
+ """
+ loop = loop_factory()
+ asyncio.set_event_loop(loop)
+ return loop
+
+
+def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
+ """Teardown and cleanup an event_loop created by setup_test_loop."""
+ closed = loop.is_closed()
+ if not closed:
+ loop.call_soon(loop.stop)
+ loop.run_forever()
+ loop.close()
+
+ if not fast:
+ gc.collect()
+
+ asyncio.set_event_loop(None)
+
+
+def _create_app_mock() -> mock.MagicMock:
+ def get_dict(app: Any, key: str) -> Any:
+ return app.__app_dict[key]
+
+ def set_dict(app: Any, key: str, value: Any) -> None:
+ app.__app_dict[key] = value
+
+ app = mock.MagicMock(spec=Application)
+ app.__app_dict = {}
+ app.__getitem__ = get_dict
+ app.__setitem__ = set_dict
+
+ app._debug = False
+ app.on_response_prepare = Signal(app)
+ app.on_response_prepare.freeze()
+ return app
+
+
+def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
+ transport = mock.Mock()
+
+ def get_extra_info(key: str) -> Optional[SSLContext]:
+ if key == "sslcontext":
+ return sslcontext
+ else:
+ return None
+
+ transport.get_extra_info.side_effect = get_extra_info
+ return transport
+
+
+def make_mocked_request(
+ method: str,
+ path: str,
+ headers: Any = None,
+ *,
+ match_info: Any = sentinel,
+ version: HttpVersion = HttpVersion(1, 1),
+ closing: bool = False,
+ app: Any = None,
+ writer: Any = sentinel,
+ protocol: Any = sentinel,
+ transport: Any = sentinel,
+ payload: StreamReader = EMPTY_PAYLOAD,
+ sslcontext: Optional[SSLContext] = None,
+ client_max_size: int = 1024**2,
+ loop: Any = ...,
+) -> Request:
+ """Creates mocked web.Request testing purposes.
+
+ Useful in unit tests, when spinning full web server is overkill or
+ specific conditions and errors are hard to trigger.
+ """
+ task = mock.Mock()
+ if loop is ...:
+ # no loop passed, try to get the current one if
+ # its is running as we need a real loop to create
+ # executor jobs to be able to do testing
+ # with a real executor
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = mock.Mock()
+ loop.create_future.return_value = ()
+
+ if version < HttpVersion(1, 1):
+ closing = True
+
+ if headers:
+ headers = CIMultiDictProxy(CIMultiDict(headers))
+ raw_hdrs = tuple(
+ (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
+ )
+ else:
+ headers = CIMultiDictProxy(CIMultiDict())
+ raw_hdrs = ()
+
+ chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+
+ message = RawRequestMessage(
+ method,
+ path,
+ version,
+ headers,
+ raw_hdrs,
+ closing,
+ None,
+ False,
+ chunked,
+ URL(path),
+ )
+ if app is None:
+ app = _create_app_mock()
+
+ if transport is sentinel:
+ transport = _create_transport(sslcontext)
+
+ if protocol is sentinel:
+ protocol = mock.Mock()
+ protocol.transport = transport
+ type(protocol).peername = mock.PropertyMock(
+ return_value=transport.get_extra_info("peername")
+ )
+ type(protocol).ssl_context = mock.PropertyMock(return_value=sslcontext)
+
+ if writer is sentinel:
+ writer = mock.Mock()
+ writer.write_headers = make_mocked_coro(None)
+ writer.write = make_mocked_coro(None)
+ writer.write_eof = make_mocked_coro(None)
+ writer.drain = make_mocked_coro(None)
+ writer.transport = transport
+
+ protocol.transport = transport
+ protocol.writer = writer
+
+ req = Request(
+ message, payload, protocol, writer, task, loop, client_max_size=client_max_size
+ )
+
+ match_info = UrlMappingMatchInfo(
+ {} if match_info is sentinel else match_info, mock.Mock()
+ )
+ match_info.add_app(app)
+ req._match_info = match_info
+
+ return req
+
+
+def make_mocked_coro(
+ return_value: Any = sentinel, raise_exception: Any = sentinel
+) -> Any:
+ """Creates a coroutine mock."""
+
+ async def mock_coro(*args: Any, **kwargs: Any) -> Any:
+ if raise_exception is not sentinel:
+ raise raise_exception
+ if not inspect.isawaitable(return_value):
+ return return_value
+ await return_value
+
+ return mock.Mock(wraps=mock_coro)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/tracing.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/tracing.py"
new file mode 100644
index 0000000..568fa7f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/tracing.py"
@@ -0,0 +1,455 @@
+from types import SimpleNamespace
+from typing import TYPE_CHECKING, Mapping, Optional, Type, TypeVar
+
+import attr
+from aiosignal import Signal
+from multidict import CIMultiDict
+from yarl import URL
+
+from .client_reqrep import ClientResponse
+
+if TYPE_CHECKING:
+ from .client import ClientSession
+
+ _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
+ _TracingSignal = Signal[ClientSession, SimpleNamespace, _ParamT_contra]
+
+
+__all__ = (
+ "TraceConfig",
+ "TraceRequestStartParams",
+ "TraceRequestEndParams",
+ "TraceRequestExceptionParams",
+ "TraceConnectionQueuedStartParams",
+ "TraceConnectionQueuedEndParams",
+ "TraceConnectionCreateStartParams",
+ "TraceConnectionCreateEndParams",
+ "TraceConnectionReuseconnParams",
+ "TraceDnsResolveHostStartParams",
+ "TraceDnsResolveHostEndParams",
+ "TraceDnsCacheHitParams",
+ "TraceDnsCacheMissParams",
+ "TraceRequestRedirectParams",
+ "TraceRequestChunkSentParams",
+ "TraceResponseChunkReceivedParams",
+ "TraceRequestHeadersSentParams",
+)
+
+
+class TraceConfig:
+ """First-class used to trace requests launched via ClientSession objects."""
+
+ def __init__(
+ self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
+ ) -> None:
+ self._on_request_start: _TracingSignal[TraceRequestStartParams] = Signal(self)
+ self._on_request_chunk_sent: _TracingSignal[TraceRequestChunkSentParams] = (
+ Signal(self)
+ )
+ self._on_response_chunk_received: _TracingSignal[
+ TraceResponseChunkReceivedParams
+ ] = Signal(self)
+ self._on_request_end: _TracingSignal[TraceRequestEndParams] = Signal(self)
+ self._on_request_exception: _TracingSignal[TraceRequestExceptionParams] = (
+ Signal(self)
+ )
+ self._on_request_redirect: _TracingSignal[TraceRequestRedirectParams] = Signal(
+ self
+ )
+ self._on_connection_queued_start: _TracingSignal[
+ TraceConnectionQueuedStartParams
+ ] = Signal(self)
+ self._on_connection_queued_end: _TracingSignal[
+ TraceConnectionQueuedEndParams
+ ] = Signal(self)
+ self._on_connection_create_start: _TracingSignal[
+ TraceConnectionCreateStartParams
+ ] = Signal(self)
+ self._on_connection_create_end: _TracingSignal[
+ TraceConnectionCreateEndParams
+ ] = Signal(self)
+ self._on_connection_reuseconn: _TracingSignal[
+ TraceConnectionReuseconnParams
+ ] = Signal(self)
+ self._on_dns_resolvehost_start: _TracingSignal[
+ TraceDnsResolveHostStartParams
+ ] = Signal(self)
+ self._on_dns_resolvehost_end: _TracingSignal[TraceDnsResolveHostEndParams] = (
+ Signal(self)
+ )
+ self._on_dns_cache_hit: _TracingSignal[TraceDnsCacheHitParams] = Signal(self)
+ self._on_dns_cache_miss: _TracingSignal[TraceDnsCacheMissParams] = Signal(self)
+ self._on_request_headers_sent: _TracingSignal[TraceRequestHeadersSentParams] = (
+ Signal(self)
+ )
+
+ self._trace_config_ctx_factory = trace_config_ctx_factory
+
+ def trace_config_ctx(
+ self, trace_request_ctx: Optional[Mapping[str, str]] = None
+ ) -> SimpleNamespace:
+ """Return a new trace_config_ctx instance"""
+ return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
+
+ def freeze(self) -> None:
+ self._on_request_start.freeze()
+ self._on_request_chunk_sent.freeze()
+ self._on_response_chunk_received.freeze()
+ self._on_request_end.freeze()
+ self._on_request_exception.freeze()
+ self._on_request_redirect.freeze()
+ self._on_connection_queued_start.freeze()
+ self._on_connection_queued_end.freeze()
+ self._on_connection_create_start.freeze()
+ self._on_connection_create_end.freeze()
+ self._on_connection_reuseconn.freeze()
+ self._on_dns_resolvehost_start.freeze()
+ self._on_dns_resolvehost_end.freeze()
+ self._on_dns_cache_hit.freeze()
+ self._on_dns_cache_miss.freeze()
+ self._on_request_headers_sent.freeze()
+
+ @property
+ def on_request_start(self) -> "_TracingSignal[TraceRequestStartParams]":
+ return self._on_request_start
+
+ @property
+ def on_request_chunk_sent(
+ self,
+ ) -> "_TracingSignal[TraceRequestChunkSentParams]":
+ return self._on_request_chunk_sent
+
+ @property
+ def on_response_chunk_received(
+ self,
+ ) -> "_TracingSignal[TraceResponseChunkReceivedParams]":
+ return self._on_response_chunk_received
+
+ @property
+ def on_request_end(self) -> "_TracingSignal[TraceRequestEndParams]":
+ return self._on_request_end
+
+ @property
+ def on_request_exception(
+ self,
+ ) -> "_TracingSignal[TraceRequestExceptionParams]":
+ return self._on_request_exception
+
+ @property
+ def on_request_redirect(
+ self,
+ ) -> "_TracingSignal[TraceRequestRedirectParams]":
+ return self._on_request_redirect
+
+ @property
+ def on_connection_queued_start(
+ self,
+ ) -> "_TracingSignal[TraceConnectionQueuedStartParams]":
+ return self._on_connection_queued_start
+
+ @property
+ def on_connection_queued_end(
+ self,
+ ) -> "_TracingSignal[TraceConnectionQueuedEndParams]":
+ return self._on_connection_queued_end
+
+ @property
+ def on_connection_create_start(
+ self,
+ ) -> "_TracingSignal[TraceConnectionCreateStartParams]":
+ return self._on_connection_create_start
+
+ @property
+ def on_connection_create_end(
+ self,
+ ) -> "_TracingSignal[TraceConnectionCreateEndParams]":
+ return self._on_connection_create_end
+
+ @property
+ def on_connection_reuseconn(
+ self,
+ ) -> "_TracingSignal[TraceConnectionReuseconnParams]":
+ return self._on_connection_reuseconn
+
+ @property
+ def on_dns_resolvehost_start(
+ self,
+ ) -> "_TracingSignal[TraceDnsResolveHostStartParams]":
+ return self._on_dns_resolvehost_start
+
+ @property
+ def on_dns_resolvehost_end(
+ self,
+ ) -> "_TracingSignal[TraceDnsResolveHostEndParams]":
+ return self._on_dns_resolvehost_end
+
+ @property
+ def on_dns_cache_hit(self) -> "_TracingSignal[TraceDnsCacheHitParams]":
+ return self._on_dns_cache_hit
+
+ @property
+ def on_dns_cache_miss(self) -> "_TracingSignal[TraceDnsCacheMissParams]":
+ return self._on_dns_cache_miss
+
+ @property
+ def on_request_headers_sent(
+ self,
+ ) -> "_TracingSignal[TraceRequestHeadersSentParams]":
+ return self._on_request_headers_sent
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestStartParams:
+ """Parameters sent by the `on_request_start` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestChunkSentParams:
+ """Parameters sent by the `on_request_chunk_sent` signal"""
+
+ method: str
+ url: URL
+ chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceResponseChunkReceivedParams:
+ """Parameters sent by the `on_response_chunk_received` signal"""
+
+ method: str
+ url: URL
+ chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestEndParams:
+ """Parameters sent by the `on_request_end` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestExceptionParams:
+ """Parameters sent by the `on_request_exception` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ exception: BaseException
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestRedirectParams:
+ """Parameters sent by the `on_request_redirect` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedStartParams:
+ """Parameters sent by the `on_connection_queued_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedEndParams:
+ """Parameters sent by the `on_connection_queued_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateStartParams:
+ """Parameters sent by the `on_connection_create_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateEndParams:
+ """Parameters sent by the `on_connection_create_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionReuseconnParams:
+ """Parameters sent by the `on_connection_reuseconn` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostStartParams:
+ """Parameters sent by the `on_dns_resolvehost_start` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostEndParams:
+ """Parameters sent by the `on_dns_resolvehost_end` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheHitParams:
+ """Parameters sent by the `on_dns_cache_hit` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheMissParams:
+ """Parameters sent by the `on_dns_cache_miss` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestHeadersSentParams:
+ """Parameters sent by the `on_request_headers_sent` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+
+
+class Trace:
+ """Internal dependency holder class.
+
+ Used to keep together the main dependencies used
+ at the moment of send a signal.
+ """
+
+ def __init__(
+ self,
+ session: "ClientSession",
+ trace_config: TraceConfig,
+ trace_config_ctx: SimpleNamespace,
+ ) -> None:
+ self._trace_config = trace_config
+ self._trace_config_ctx = trace_config_ctx
+ self._session = session
+
+ async def send_request_start(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+ return await self._trace_config.on_request_start.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestStartParams(method, url, headers),
+ )
+
+ async def send_request_chunk_sent(
+ self, method: str, url: URL, chunk: bytes
+ ) -> None:
+ return await self._trace_config.on_request_chunk_sent.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestChunkSentParams(method, url, chunk),
+ )
+
+ async def send_response_chunk_received(
+ self, method: str, url: URL, chunk: bytes
+ ) -> None:
+ return await self._trace_config.on_response_chunk_received.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceResponseChunkReceivedParams(method, url, chunk),
+ )
+
+ async def send_request_end(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ response: ClientResponse,
+ ) -> None:
+ return await self._trace_config.on_request_end.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestEndParams(method, url, headers, response),
+ )
+
+ async def send_request_exception(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ exception: BaseException,
+ ) -> None:
+ return await self._trace_config.on_request_exception.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestExceptionParams(method, url, headers, exception),
+ )
+
+ async def send_request_redirect(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ response: ClientResponse,
+ ) -> None:
+ return await self._trace_config._on_request_redirect.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestRedirectParams(method, url, headers, response),
+ )
+
+ async def send_connection_queued_start(self) -> None:
+ return await self._trace_config.on_connection_queued_start.send(
+ self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
+ )
+
+ async def send_connection_queued_end(self) -> None:
+ return await self._trace_config.on_connection_queued_end.send(
+ self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
+ )
+
+ async def send_connection_create_start(self) -> None:
+ return await self._trace_config.on_connection_create_start.send(
+ self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
+ )
+
+ async def send_connection_create_end(self) -> None:
+ return await self._trace_config.on_connection_create_end.send(
+ self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
+ )
+
+ async def send_connection_reuseconn(self) -> None:
+ return await self._trace_config.on_connection_reuseconn.send(
+ self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
+ )
+
+ async def send_dns_resolvehost_start(self, host: str) -> None:
+ return await self._trace_config.on_dns_resolvehost_start.send(
+ self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
+ )
+
+ async def send_dns_resolvehost_end(self, host: str) -> None:
+ return await self._trace_config.on_dns_resolvehost_end.send(
+ self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
+ )
+
+ async def send_dns_cache_hit(self, host: str) -> None:
+ return await self._trace_config.on_dns_cache_hit.send(
+ self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
+ )
+
+ async def send_dns_cache_miss(self, host: str) -> None:
+ return await self._trace_config.on_dns_cache_miss.send(
+ self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
+ )
+
+ async def send_request_headers(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+ return await self._trace_config._on_request_headers_sent.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestHeadersSentParams(method, url, headers),
+ )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/typedefs.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/typedefs.py"
new file mode 100644
index 0000000..cc8c082
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/typedefs.py"
@@ -0,0 +1,69 @@
+import json
+import os
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable,
+ Mapping,
+ Protocol,
+ Tuple,
+ Union,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
+from yarl import URL, Query as _Query
+
+Query = _Query
+
+DEFAULT_JSON_ENCODER = json.dumps
+DEFAULT_JSON_DECODER = json.loads
+
+if TYPE_CHECKING:
+ _CIMultiDict = CIMultiDict[str]
+ _CIMultiDictProxy = CIMultiDictProxy[str]
+ _MultiDict = MultiDict[str]
+ _MultiDictProxy = MultiDictProxy[str]
+ from http.cookies import BaseCookie, Morsel
+
+ from .web import Request, StreamResponse
+else:
+ _CIMultiDict = CIMultiDict
+ _CIMultiDictProxy = CIMultiDictProxy
+ _MultiDict = MultiDict
+ _MultiDictProxy = MultiDictProxy
+
+Byteish = Union[bytes, bytearray, memoryview]
+JSONEncoder = Callable[[Any], str]
+JSONDecoder = Callable[[str], Any]
+LooseHeaders = Union[
+ Mapping[str, str],
+ Mapping[istr, str],
+ _CIMultiDict,
+ _CIMultiDictProxy,
+ Iterable[Tuple[Union[str, istr], str]],
+]
+RawHeaders = Tuple[Tuple[bytes, bytes], ...]
+StrOrURL = Union[str, URL]
+
+LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
+LooseCookiesIterables = Iterable[
+ Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
+]
+LooseCookies = Union[
+ LooseCookiesMappings,
+ LooseCookiesIterables,
+ "BaseCookie[str]",
+]
+
+Handler = Callable[["Request"], Awaitable["StreamResponse"]]
+
+
+class Middleware(Protocol):
+ def __call__(
+ self, request: "Request", handler: Handler
+ ) -> Awaitable["StreamResponse"]: ...
+
+
+PathLike = Union[str, "os.PathLike[str]"]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web.py"
new file mode 100644
index 0000000..5a1fc96
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web.py"
@@ -0,0 +1,592 @@
+import asyncio
+import logging
+import os
+import socket
+import sys
+import warnings
+from argparse import ArgumentParser
+from collections.abc import Iterable
+from contextlib import suppress
+from importlib import import_module
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable as TypingIterable,
+ List,
+ Optional,
+ Set,
+ Type,
+ Union,
+ cast,
+)
+
+from .abc import AbstractAccessLogger
+from .helpers import AppKey as AppKey
+from .log import access_logger
+from .typedefs import PathLike
+from .web_app import Application as Application, CleanupError as CleanupError
+from .web_exceptions import (
+ HTTPAccepted as HTTPAccepted,
+ HTTPBadGateway as HTTPBadGateway,
+ HTTPBadRequest as HTTPBadRequest,
+ HTTPClientError as HTTPClientError,
+ HTTPConflict as HTTPConflict,
+ HTTPCreated as HTTPCreated,
+ HTTPError as HTTPError,
+ HTTPException as HTTPException,
+ HTTPExpectationFailed as HTTPExpectationFailed,
+ HTTPFailedDependency as HTTPFailedDependency,
+ HTTPForbidden as HTTPForbidden,
+ HTTPFound as HTTPFound,
+ HTTPGatewayTimeout as HTTPGatewayTimeout,
+ HTTPGone as HTTPGone,
+ HTTPInsufficientStorage as HTTPInsufficientStorage,
+ HTTPInternalServerError as HTTPInternalServerError,
+ HTTPLengthRequired as HTTPLengthRequired,
+ HTTPMethodNotAllowed as HTTPMethodNotAllowed,
+ HTTPMisdirectedRequest as HTTPMisdirectedRequest,
+ HTTPMove as HTTPMove,
+ HTTPMovedPermanently as HTTPMovedPermanently,
+ HTTPMultipleChoices as HTTPMultipleChoices,
+ HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
+ HTTPNoContent as HTTPNoContent,
+ HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
+ HTTPNotAcceptable as HTTPNotAcceptable,
+ HTTPNotExtended as HTTPNotExtended,
+ HTTPNotFound as HTTPNotFound,
+ HTTPNotImplemented as HTTPNotImplemented,
+ HTTPNotModified as HTTPNotModified,
+ HTTPOk as HTTPOk,
+ HTTPPartialContent as HTTPPartialContent,
+ HTTPPaymentRequired as HTTPPaymentRequired,
+ HTTPPermanentRedirect as HTTPPermanentRedirect,
+ HTTPPreconditionFailed as HTTPPreconditionFailed,
+ HTTPPreconditionRequired as HTTPPreconditionRequired,
+ HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
+ HTTPRedirection as HTTPRedirection,
+ HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
+ HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
+ HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
+ HTTPRequestTimeout as HTTPRequestTimeout,
+ HTTPRequestURITooLong as HTTPRequestURITooLong,
+ HTTPResetContent as HTTPResetContent,
+ HTTPSeeOther as HTTPSeeOther,
+ HTTPServerError as HTTPServerError,
+ HTTPServiceUnavailable as HTTPServiceUnavailable,
+ HTTPSuccessful as HTTPSuccessful,
+ HTTPTemporaryRedirect as HTTPTemporaryRedirect,
+ HTTPTooManyRequests as HTTPTooManyRequests,
+ HTTPUnauthorized as HTTPUnauthorized,
+ HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
+ HTTPUnprocessableEntity as HTTPUnprocessableEntity,
+ HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
+ HTTPUpgradeRequired as HTTPUpgradeRequired,
+ HTTPUseProxy as HTTPUseProxy,
+ HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
+ HTTPVersionNotSupported as HTTPVersionNotSupported,
+ NotAppKeyWarning as NotAppKeyWarning,
+)
+from .web_fileresponse import FileResponse as FileResponse
+from .web_log import AccessLogger
+from .web_middlewares import (
+ middleware as middleware,
+ normalize_path_middleware as normalize_path_middleware,
+)
+from .web_protocol import (
+ PayloadAccessError as PayloadAccessError,
+ RequestHandler as RequestHandler,
+ RequestPayloadError as RequestPayloadError,
+)
+from .web_request import (
+ BaseRequest as BaseRequest,
+ FileField as FileField,
+ Request as Request,
+)
+from .web_response import (
+ ContentCoding as ContentCoding,
+ Response as Response,
+ StreamResponse as StreamResponse,
+ json_response as json_response,
+)
+from .web_routedef import (
+ AbstractRouteDef as AbstractRouteDef,
+ RouteDef as RouteDef,
+ RouteTableDef as RouteTableDef,
+ StaticDef as StaticDef,
+ delete as delete,
+ get as get,
+ head as head,
+ options as options,
+ patch as patch,
+ post as post,
+ put as put,
+ route as route,
+ static as static,
+ view as view,
+)
+from .web_runner import (
+ AppRunner as AppRunner,
+ BaseRunner as BaseRunner,
+ BaseSite as BaseSite,
+ GracefulExit as GracefulExit,
+ NamedPipeSite as NamedPipeSite,
+ ServerRunner as ServerRunner,
+ SockSite as SockSite,
+ TCPSite as TCPSite,
+ UnixSite as UnixSite,
+)
+from .web_server import Server as Server
+from .web_urldispatcher import (
+ AbstractResource as AbstractResource,
+ AbstractRoute as AbstractRoute,
+ DynamicResource as DynamicResource,
+ PlainResource as PlainResource,
+ PrefixedSubAppResource as PrefixedSubAppResource,
+ Resource as Resource,
+ ResourceRoute as ResourceRoute,
+ StaticResource as StaticResource,
+ UrlDispatcher as UrlDispatcher,
+ UrlMappingMatchInfo as UrlMappingMatchInfo,
+ View as View,
+)
+from .web_ws import (
+ WebSocketReady as WebSocketReady,
+ WebSocketResponse as WebSocketResponse,
+ WSMsgType as WSMsgType,
+)
+
+__all__ = (
+ # web_app
+ "AppKey",
+ "Application",
+ "CleanupError",
+ # web_exceptions
+ "NotAppKeyWarning",
+ "HTTPAccepted",
+ "HTTPBadGateway",
+ "HTTPBadRequest",
+ "HTTPClientError",
+ "HTTPConflict",
+ "HTTPCreated",
+ "HTTPError",
+ "HTTPException",
+ "HTTPExpectationFailed",
+ "HTTPFailedDependency",
+ "HTTPForbidden",
+ "HTTPFound",
+ "HTTPGatewayTimeout",
+ "HTTPGone",
+ "HTTPInsufficientStorage",
+ "HTTPInternalServerError",
+ "HTTPLengthRequired",
+ "HTTPMethodNotAllowed",
+ "HTTPMisdirectedRequest",
+ "HTTPMove",
+ "HTTPMovedPermanently",
+ "HTTPMultipleChoices",
+ "HTTPNetworkAuthenticationRequired",
+ "HTTPNoContent",
+ "HTTPNonAuthoritativeInformation",
+ "HTTPNotAcceptable",
+ "HTTPNotExtended",
+ "HTTPNotFound",
+ "HTTPNotImplemented",
+ "HTTPNotModified",
+ "HTTPOk",
+ "HTTPPartialContent",
+ "HTTPPaymentRequired",
+ "HTTPPermanentRedirect",
+ "HTTPPreconditionFailed",
+ "HTTPPreconditionRequired",
+ "HTTPProxyAuthenticationRequired",
+ "HTTPRedirection",
+ "HTTPRequestEntityTooLarge",
+ "HTTPRequestHeaderFieldsTooLarge",
+ "HTTPRequestRangeNotSatisfiable",
+ "HTTPRequestTimeout",
+ "HTTPRequestURITooLong",
+ "HTTPResetContent",
+ "HTTPSeeOther",
+ "HTTPServerError",
+ "HTTPServiceUnavailable",
+ "HTTPSuccessful",
+ "HTTPTemporaryRedirect",
+ "HTTPTooManyRequests",
+ "HTTPUnauthorized",
+ "HTTPUnavailableForLegalReasons",
+ "HTTPUnprocessableEntity",
+ "HTTPUnsupportedMediaType",
+ "HTTPUpgradeRequired",
+ "HTTPUseProxy",
+ "HTTPVariantAlsoNegotiates",
+ "HTTPVersionNotSupported",
+ # web_fileresponse
+ "FileResponse",
+ # web_middlewares
+ "middleware",
+ "normalize_path_middleware",
+ # web_protocol
+ "PayloadAccessError",
+ "RequestHandler",
+ "RequestPayloadError",
+ # web_request
+ "BaseRequest",
+ "FileField",
+ "Request",
+ # web_response
+ "ContentCoding",
+ "Response",
+ "StreamResponse",
+ "json_response",
+ # web_routedef
+ "AbstractRouteDef",
+ "RouteDef",
+ "RouteTableDef",
+ "StaticDef",
+ "delete",
+ "get",
+ "head",
+ "options",
+ "patch",
+ "post",
+ "put",
+ "route",
+ "static",
+ "view",
+ # web_runner
+ "AppRunner",
+ "BaseRunner",
+ "BaseSite",
+ "GracefulExit",
+ "ServerRunner",
+ "SockSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ # web_server
+ "Server",
+ # web_urldispatcher
+ "AbstractResource",
+ "AbstractRoute",
+ "DynamicResource",
+ "PlainResource",
+ "PrefixedSubAppResource",
+ "Resource",
+ "ResourceRoute",
+ "StaticResource",
+ "UrlDispatcher",
+ "UrlMappingMatchInfo",
+ "View",
+ # web_ws
+ "WebSocketReady",
+ "WebSocketResponse",
+ "WSMsgType",
+ # web
+ "run_app",
+)
+
+
+if TYPE_CHECKING:
+ from ssl import SSLContext
+else:
+ try:
+ from ssl import SSLContext
+ except ImportError: # pragma: no cover
+ SSLContext = object # type: ignore[misc,assignment]
+
+# Only display warning when using -Wdefault, -We, -X dev or similar.
+warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)
+
+HostSequence = TypingIterable[str]
+
+
+async def _run_app(
+ app: Union[Application, Awaitable[Application]],
+ *,
+ host: Optional[Union[str, HostSequence]] = None,
+ port: Optional[int] = None,
+ path: Union[PathLike, TypingIterable[PathLike], None] = None,
+ sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
+ ssl_context: Optional[SSLContext] = None,
+ print: Optional[Callable[..., None]] = print,
+ backlog: int = 128,
+ reuse_address: Optional[bool] = None,
+ reuse_port: Optional[bool] = None,
+ **kwargs: Any, # TODO(PY311): Use Unpack
+) -> None:
+ # An internal function to actually do all dirty job for application running
+ if asyncio.iscoroutine(app):
+ app = await app
+
+ app = cast(Application, app)
+
+ runner = AppRunner(app, **kwargs)
+
+ await runner.setup()
+
+ sites: List[BaseSite] = []
+
+ try:
+ if host is not None:
+ if isinstance(host, str):
+ sites.append(
+ TCPSite(
+ runner,
+ host,
+ port,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ )
+ )
+ else:
+ for h in host:
+ sites.append(
+ TCPSite(
+ runner,
+ h,
+ port,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ )
+ )
+ elif path is None and sock is None or port is not None:
+ sites.append(
+ TCPSite(
+ runner,
+ port=port,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ )
+ )
+
+ if path is not None:
+ if isinstance(path, (str, os.PathLike)):
+ sites.append(
+ UnixSite(
+ runner,
+ path,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+ else:
+ for p in path:
+ sites.append(
+ UnixSite(
+ runner,
+ p,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+
+ if sock is not None:
+ if not isinstance(sock, Iterable):
+ sites.append(
+ SockSite(
+ runner,
+ sock,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+ else:
+ for s in sock:
+ sites.append(
+ SockSite(
+ runner,
+ s,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+ for site in sites:
+ await site.start()
+
+ if print: # pragma: no branch
+ names = sorted(str(s.name) for s in runner.sites)
+ print(
+ "======== Running on {} ========\n"
+ "(Press CTRL+C to quit)".format(", ".join(names))
+ )
+
+ # sleep forever by 1 hour intervals,
+ while True:
+ await asyncio.sleep(3600)
+ finally:
+ await runner.cleanup()
+
+
+def _cancel_tasks(
+ to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
+) -> None:
+ if not to_cancel:
+ return
+
+ for task in to_cancel:
+ task.cancel()
+
+ loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
+
+ for task in to_cancel:
+ if task.cancelled():
+ continue
+ if task.exception() is not None:
+ loop.call_exception_handler(
+ {
+ "message": "unhandled exception during asyncio.run() shutdown",
+ "exception": task.exception(),
+ "task": task,
+ }
+ )
+
+
+def run_app(
+ app: Union[Application, Awaitable[Application]],
+ *,
+ host: Optional[Union[str, HostSequence]] = None,
+ port: Optional[int] = None,
+ path: Union[PathLike, TypingIterable[PathLike], None] = None,
+ sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
+ shutdown_timeout: float = 60.0,
+ keepalive_timeout: float = 75.0,
+ ssl_context: Optional[SSLContext] = None,
+ print: Optional[Callable[..., None]] = print,
+ backlog: int = 128,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ access_log_format: str = AccessLogger.LOG_FORMAT,
+ access_log: Optional[logging.Logger] = access_logger,
+ handle_signals: bool = True,
+ reuse_address: Optional[bool] = None,
+ reuse_port: Optional[bool] = None,
+ handler_cancellation: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ **kwargs: Any,
+) -> None:
+ """Run an app locally"""
+ if loop is None:
+ loop = asyncio.new_event_loop()
+
+ # Configure if and only if in debugging mode and using the default logger
+ if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
+ if access_log.level == logging.NOTSET:
+ access_log.setLevel(logging.DEBUG)
+ if not access_log.hasHandlers():
+ access_log.addHandler(logging.StreamHandler())
+
+ main_task = loop.create_task(
+ _run_app(
+ app,
+ host=host,
+ port=port,
+ path=path,
+ sock=sock,
+ shutdown_timeout=shutdown_timeout,
+ keepalive_timeout=keepalive_timeout,
+ ssl_context=ssl_context,
+ print=print,
+ backlog=backlog,
+ access_log_class=access_log_class,
+ access_log_format=access_log_format,
+ access_log=access_log,
+ handle_signals=handle_signals,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ handler_cancellation=handler_cancellation,
+ **kwargs,
+ )
+ )
+
+ try:
+ asyncio.set_event_loop(loop)
+ loop.run_until_complete(main_task)
+ except (GracefulExit, KeyboardInterrupt): # pragma: no cover
+ pass
+ finally:
+ try:
+ main_task.cancel()
+ with suppress(asyncio.CancelledError):
+ loop.run_until_complete(main_task)
+ finally:
+ _cancel_tasks(asyncio.all_tasks(loop), loop)
+ loop.run_until_complete(loop.shutdown_asyncgens())
+ loop.close()
+
+
+def main(argv: List[str]) -> None:
+ arg_parser = ArgumentParser(
+ description="aiohttp.web Application server", prog="aiohttp.web"
+ )
+ arg_parser.add_argument(
+ "entry_func",
+ help=(
+ "Callable returning the `aiohttp.web.Application` instance to "
+ "run. Should be specified in the 'module:function' syntax."
+ ),
+ metavar="entry-func",
+ )
+ arg_parser.add_argument(
+ "-H",
+ "--hostname",
+ help="TCP/IP hostname to serve on (default: localhost)",
+ default=None,
+ )
+ arg_parser.add_argument(
+ "-P",
+ "--port",
+ help="TCP/IP port to serve on (default: %(default)r)",
+ type=int,
+ default=8080,
+ )
+ arg_parser.add_argument(
+ "-U",
+ "--path",
+ help="Unix file system path to serve on. Can be combined with hostname "
+ "to serve on both Unix and TCP.",
+ )
+ args, extra_argv = arg_parser.parse_known_args(argv)
+
+ # Import logic
+ mod_str, _, func_str = args.entry_func.partition(":")
+ if not func_str or not mod_str:
+ arg_parser.error("'entry-func' not in 'module:function' syntax")
+ if mod_str.startswith("."):
+ arg_parser.error("relative module names not supported")
+ try:
+ module = import_module(mod_str)
+ except ImportError as ex:
+ arg_parser.error(f"unable to import {mod_str}: {ex}")
+ try:
+ func = getattr(module, func_str)
+ except AttributeError:
+ arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
+
+ # Compatibility logic
+ if args.path is not None and not hasattr(socket, "AF_UNIX"):
+ arg_parser.error(
+ "file system paths not supported by your operating environment"
+ )
+
+ logging.basicConfig(level=logging.DEBUG)
+
+ if args.path and args.hostname is None:
+ host = port = None
+ else:
+ host = args.hostname or "localhost"
+ port = args.port
+
+ app = func(extra_argv)
+ run_app(app, host=host, port=port, path=args.path)
+ arg_parser.exit(message="Stopped\n")
+
+
+if __name__ == "__main__": # pragma: no branch
+ main(sys.argv[1:]) # pragma: no cover
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_app.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_app.py"
new file mode 100644
index 0000000..619c008
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_app.py"
@@ -0,0 +1,620 @@
+import asyncio
+import logging
+import warnings
+from functools import lru_cache, partial, update_wrapper
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+
+from aiosignal import Signal
+from frozenlist import FrozenList
+
+from . import hdrs
+from .abc import (
+ AbstractAccessLogger,
+ AbstractMatchInfo,
+ AbstractRouter,
+ AbstractStreamWriter,
+)
+from .helpers import DEBUG, AppKey
+from .http_parser import RawRequestMessage
+from .log import web_logger
+from .streams import StreamReader
+from .typedefs import Handler, Middleware
+from .web_exceptions import NotAppKeyWarning
+from .web_log import AccessLogger
+from .web_middlewares import _fix_request_current_app
+from .web_protocol import RequestHandler
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_routedef import AbstractRouteDef
+from .web_server import Server
+from .web_urldispatcher import (
+ AbstractResource,
+ AbstractRoute,
+ Domain,
+ MaskDomain,
+ MatchedSubAppResource,
+ PrefixedSubAppResource,
+ SystemRoute,
+ UrlDispatcher,
+)
+
+__all__ = ("Application", "CleanupError")
+
+
+if TYPE_CHECKING:
+ _AppSignal = Signal["Application"]
+ _RespPrepareSignal = Signal[Request, StreamResponse]
+ _Middlewares = FrozenList[Middleware]
+ _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]]
+ _Subapps = List["Application"]
+else:
+ # No type checker mode, skip types
+ _AppSignal = Signal
+ _RespPrepareSignal = Signal
+ _Middlewares = FrozenList
+ _MiddlewaresHandlers = Optional[Sequence]
+ _Subapps = List
+
+_T = TypeVar("_T")
+_U = TypeVar("_U")
+_Resource = TypeVar("_Resource", bound=AbstractResource)
+
+
+def _build_middlewares(
+ handler: Handler, apps: Tuple["Application", ...]
+) -> Callable[[Request], Awaitable[StreamResponse]]:
+ """Apply middlewares to handler."""
+ for app in apps[::-1]:
+ for m, _ in app._middlewares_handlers: # type: ignore[union-attr]
+ handler = update_wrapper(partial(m, handler=handler), handler)
+ return handler
+
+
+_cached_build_middleware = lru_cache(maxsize=1024)(_build_middlewares)
+
+
+class Application(MutableMapping[Union[str, AppKey[Any]], Any]):
+ ATTRS = frozenset(
+ [
+ "logger",
+ "_debug",
+ "_router",
+ "_loop",
+ "_handler_args",
+ "_middlewares",
+ "_middlewares_handlers",
+ "_has_legacy_middlewares",
+ "_run_middlewares",
+ "_state",
+ "_frozen",
+ "_pre_frozen",
+ "_subapps",
+ "_on_response_prepare",
+ "_on_startup",
+ "_on_shutdown",
+ "_on_cleanup",
+ "_client_max_size",
+ "_cleanup_ctx",
+ ]
+ )
+
+ def __init__(
+ self,
+ *,
+ logger: logging.Logger = web_logger,
+ router: Optional[UrlDispatcher] = None,
+ middlewares: Iterable[Middleware] = (),
+ handler_args: Optional[Mapping[str, Any]] = None,
+ client_max_size: int = 1024**2,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ debug: Any = ..., # mypy doesn't support ellipsis
+ ) -> None:
+ if router is None:
+ router = UrlDispatcher()
+ else:
+ warnings.warn(
+ "router argument is deprecated", DeprecationWarning, stacklevel=2
+ )
+ assert isinstance(router, AbstractRouter), router
+
+ if loop is not None:
+ warnings.warn(
+ "loop argument is deprecated", DeprecationWarning, stacklevel=2
+ )
+
+ if debug is not ...:
+ warnings.warn(
+ "debug argument is deprecated", DeprecationWarning, stacklevel=2
+ )
+ self._debug = debug
+ self._router: UrlDispatcher = router
+ self._loop = loop
+ self._handler_args = handler_args
+ self.logger = logger
+
+ self._middlewares: _Middlewares = FrozenList(middlewares)
+
+ # initialized on freezing
+ self._middlewares_handlers: _MiddlewaresHandlers = None
+ # initialized on freezing
+ self._run_middlewares: Optional[bool] = None
+ self._has_legacy_middlewares: bool = True
+
+ self._state: Dict[Union[AppKey[Any], str], object] = {}
+ self._frozen = False
+ self._pre_frozen = False
+ self._subapps: _Subapps = []
+
+ self._on_response_prepare: _RespPrepareSignal = Signal(self)
+ self._on_startup: _AppSignal = Signal(self)
+ self._on_shutdown: _AppSignal = Signal(self)
+ self._on_cleanup: _AppSignal = Signal(self)
+ self._cleanup_ctx = CleanupContext()
+ self._on_startup.append(self._cleanup_ctx._on_startup)
+ self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
+ self._client_max_size = client_max_size
+
+ def __init_subclass__(cls: Type["Application"]) -> None:
+ warnings.warn(
+ "Inheritance class {} from web.Application "
+ "is discouraged".format(cls.__name__),
+ DeprecationWarning,
+ stacklevel=3,
+ )
+
+ if DEBUG: # pragma: no cover
+
+ def __setattr__(self, name: str, val: Any) -> None:
+ if name not in self.ATTRS:
+ warnings.warn(
+ "Setting custom web.Application.{} attribute "
+ "is discouraged".format(name),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__setattr__(name, val)
+
+ # MutableMapping API
+
+ def __eq__(self, other: object) -> bool:
+ return self is other
+
+ @overload # type: ignore[override]
+ def __getitem__(self, key: AppKey[_T]) -> _T: ...
+
+ @overload
+ def __getitem__(self, key: str) -> Any: ...
+
+ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
+ return self._state[key]
+
+ def _check_frozen(self) -> None:
+ if self._frozen:
+ warnings.warn(
+ "Changing state of started or joined application is deprecated",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+
+ @overload # type: ignore[override]
+ def __setitem__(self, key: AppKey[_T], value: _T) -> None: ...
+
+ @overload
+ def __setitem__(self, key: str, value: Any) -> None: ...
+
+ def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None:
+ self._check_frozen()
+ if not isinstance(key, AppKey):
+ warnings.warn(
+ "It is recommended to use web.AppKey instances for keys.\n"
+ + "https://docs.aiohttp.org/en/stable/web_advanced.html"
+ + "#application-s-config",
+ category=NotAppKeyWarning,
+ stacklevel=2,
+ )
+ self._state[key] = value
+
+ def __delitem__(self, key: Union[str, AppKey[_T]]) -> None:
+ self._check_frozen()
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
+ return iter(self._state)
+
+ def __hash__(self) -> int:
+ return id(self)
+
+ @overload # type: ignore[override]
+ def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...
+
+ @overload
+ def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: ...
+
+ @overload
+ def get(self, key: str, default: Any = ...) -> Any: ...
+
+ def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
+ return self._state.get(key, default)
+
+ ########
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop:
+ # Technically the loop can be None
+ # but we mask it by explicit type cast
+ # to provide more convenient type annotation
+ warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
+ return cast(asyncio.AbstractEventLoop, self._loop)
+
+ def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ if self._loop is not None and self._loop is not loop:
+ raise RuntimeError(
+ "web.Application instance initialized with different loop"
+ )
+
+ self._loop = loop
+
+ # set loop debug
+ if self._debug is ...:
+ self._debug = loop.get_debug()
+
+ # set loop to sub applications
+ for subapp in self._subapps:
+ subapp._set_loop(loop)
+
+ @property
+ def pre_frozen(self) -> bool:
+ return self._pre_frozen
+
+ def pre_freeze(self) -> None:
+ if self._pre_frozen:
+ return
+
+ self._pre_frozen = True
+ self._middlewares.freeze()
+ self._router.freeze()
+ self._on_response_prepare.freeze()
+ self._cleanup_ctx.freeze()
+ self._on_startup.freeze()
+ self._on_shutdown.freeze()
+ self._on_cleanup.freeze()
+ self._middlewares_handlers = tuple(self._prepare_middleware())
+ self._has_legacy_middlewares = any(
+ not new_style for _, new_style in self._middlewares_handlers
+ )
+
+ # If current app and any subapp do not have middlewares avoid run all
+ # of the code footprint that it implies, which have a middleware
+ # hardcoded per app that sets up the current_app attribute. If no
+ # middlewares are configured the handler will receive the proper
+ # current_app without needing all of this code.
+ self._run_middlewares = True if self.middlewares else False
+
+ for subapp in self._subapps:
+ subapp.pre_freeze()
+ self._run_middlewares = self._run_middlewares or subapp._run_middlewares
+
+ @property
+ def frozen(self) -> bool:
+ return self._frozen
+
+ def freeze(self) -> None:
+ if self._frozen:
+ return
+
+ self.pre_freeze()
+ self._frozen = True
+ for subapp in self._subapps:
+ subapp.freeze()
+
+ @property
+ def debug(self) -> bool:
+ warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
+ return self._debug # type: ignore[no-any-return]
+
+ def _reg_subapp_signals(self, subapp: "Application") -> None:
+ def reg_handler(signame: str) -> None:
+ subsig = getattr(subapp, signame)
+
+ async def handler(app: "Application") -> None:
+ await subsig.send(subapp)
+
+ appsig = getattr(self, signame)
+ appsig.append(handler)
+
+ reg_handler("on_startup")
+ reg_handler("on_shutdown")
+ reg_handler("on_cleanup")
+
+ def add_subapp(self, prefix: str, subapp: "Application") -> PrefixedSubAppResource:
+ if not isinstance(prefix, str):
+ raise TypeError("Prefix must be str")
+ prefix = prefix.rstrip("/")
+ if not prefix:
+ raise ValueError("Prefix cannot be empty")
+ factory = partial(PrefixedSubAppResource, prefix, subapp)
+ return self._add_subapp(factory, subapp)
+
+ def _add_subapp(
+ self, resource_factory: Callable[[], _Resource], subapp: "Application"
+ ) -> _Resource:
+ if self.frozen:
+ raise RuntimeError("Cannot add sub application to frozen application")
+ if subapp.frozen:
+ raise RuntimeError("Cannot add frozen application")
+ resource = resource_factory()
+ self.router.register_resource(resource)
+ self._reg_subapp_signals(subapp)
+ self._subapps.append(subapp)
+ subapp.pre_freeze()
+ if self._loop is not None:
+ subapp._set_loop(self._loop)
+ return resource
+
+ def add_domain(self, domain: str, subapp: "Application") -> MatchedSubAppResource:
+ if not isinstance(domain, str):
+ raise TypeError("Domain must be str")
+ elif "*" in domain:
+ rule: Domain = MaskDomain(domain)
+ else:
+ rule = Domain(domain)
+ factory = partial(MatchedSubAppResource, rule, subapp)
+ return self._add_subapp(factory, subapp)
+
+ def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
+ return self.router.add_routes(routes)
+
+ @property
+ def on_response_prepare(self) -> _RespPrepareSignal:
+ return self._on_response_prepare
+
+ @property
+ def on_startup(self) -> _AppSignal:
+ return self._on_startup
+
+ @property
+ def on_shutdown(self) -> _AppSignal:
+ return self._on_shutdown
+
+ @property
+ def on_cleanup(self) -> _AppSignal:
+ return self._on_cleanup
+
+ @property
+ def cleanup_ctx(self) -> "CleanupContext":
+ return self._cleanup_ctx
+
+ @property
+ def router(self) -> UrlDispatcher:
+ return self._router
+
+ @property
+ def middlewares(self) -> _Middlewares:
+ return self._middlewares
+
+ def _make_handler(
+ self,
+ *,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ **kwargs: Any,
+ ) -> Server:
+
+ if not issubclass(access_log_class, AbstractAccessLogger):
+ raise TypeError(
+ "access_log_class must be subclass of "
+ "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
+ )
+
+ self._set_loop(loop)
+ self.freeze()
+
+ kwargs["debug"] = self._debug
+ kwargs["access_log_class"] = access_log_class
+ if self._handler_args:
+ for k, v in self._handler_args.items():
+ kwargs[k] = v
+
+ return Server(
+ self._handle, # type: ignore[arg-type]
+ request_factory=self._make_request,
+ loop=self._loop,
+ **kwargs,
+ )
+
+ def make_handler(
+ self,
+ *,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ **kwargs: Any,
+ ) -> Server:
+
+ warnings.warn(
+ "Application.make_handler(...) is deprecated, use AppRunner API instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ return self._make_handler(
+ loop=loop, access_log_class=access_log_class, **kwargs
+ )
+
+ async def startup(self) -> None:
+ """Causes on_startup signal
+
+ Should be called in the event loop along with the request handler.
+ """
+ await self.on_startup.send(self)
+
+ async def shutdown(self) -> None:
+ """Causes on_shutdown signal
+
+ Should be called before cleanup()
+ """
+ await self.on_shutdown.send(self)
+
+ async def cleanup(self) -> None:
+ """Causes on_cleanup signal
+
+ Should be called after shutdown()
+ """
+ if self.on_cleanup.frozen:
+ await self.on_cleanup.send(self)
+ else:
+ # If an exception occurs in startup, ensure cleanup contexts are completed.
+ await self._cleanup_ctx._on_cleanup(self)
+
+ def _make_request(
+ self,
+ message: RawRequestMessage,
+ payload: StreamReader,
+ protocol: RequestHandler,
+ writer: AbstractStreamWriter,
+ task: "asyncio.Task[None]",
+ _cls: Type[Request] = Request,
+ ) -> Request:
+ if TYPE_CHECKING:
+ assert self._loop is not None
+ return _cls(
+ message,
+ payload,
+ protocol,
+ writer,
+ task,
+ self._loop,
+ client_max_size=self._client_max_size,
+ )
+
+ def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]:
+ for m in reversed(self._middlewares):
+ if getattr(m, "__middleware_version__", None) == 1:
+ yield m, True
+ else:
+ warnings.warn(
+ f'old-style middleware "{m!r}" deprecated, see #2252',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ yield m, False
+
+ yield _fix_request_current_app(self), True
+
+ async def _handle(self, request: Request) -> StreamResponse:
+ loop = asyncio.get_event_loop()
+ debug = loop.get_debug()
+ match_info = await self._router.resolve(request)
+ if debug: # pragma: no cover
+ if not isinstance(match_info, AbstractMatchInfo):
+ raise TypeError(
+ "match_info should be AbstractMatchInfo "
+ "instance, not {!r}".format(match_info)
+ )
+ match_info.add_app(self)
+
+ match_info.freeze()
+
+ request._match_info = match_info
+
+ if request.headers.get(hdrs.EXPECT):
+ resp = await match_info.expect_handler(request)
+ await request.writer.drain()
+ if resp is not None:
+ return resp
+
+ handler = match_info.handler
+
+ if self._run_middlewares:
+ # If its a SystemRoute, don't cache building the middlewares since
+ # they are constructed for every MatchInfoError as a new handler
+ # is made each time.
+ if not self._has_legacy_middlewares and not isinstance(
+ match_info.route, SystemRoute
+ ):
+ handler = _cached_build_middleware(handler, match_info.apps)
+ else:
+ for app in match_info.apps[::-1]:
+ for m, new_style in app._middlewares_handlers: # type: ignore[union-attr]
+ if new_style:
+ handler = update_wrapper(
+ partial(m, handler=handler), handler
+ )
+ else:
+ handler = await m(app, handler) # type: ignore[arg-type,assignment]
+
+ return await handler(request)
+
+ def __call__(self) -> "Application":
+ """gunicorn compatibility"""
+ return self
+
+ def __repr__(self) -> str:
+ return f"<Application 0x{id(self):x}>"
+
+ def __bool__(self) -> bool:
+ return True
+
+
+class CleanupError(RuntimeError):
+ @property
+ def exceptions(self) -> List[BaseException]:
+ return cast(List[BaseException], self.args[1])
+
+
+if TYPE_CHECKING:
+ _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
+else:
+ _CleanupContextBase = FrozenList
+
+
+class CleanupContext(_CleanupContextBase):
+ def __init__(self) -> None:
+ super().__init__()
+ self._exits: List[AsyncIterator[None]] = []
+
+ async def _on_startup(self, app: Application) -> None:
+ for cb in self:
+ it = cb(app).__aiter__()
+ await it.__anext__()
+ self._exits.append(it)
+
+ async def _on_cleanup(self, app: Application) -> None:
+ errors = []
+ for it in reversed(self._exits):
+ try:
+ await it.__anext__()
+ except StopAsyncIteration:
+ pass
+ except (Exception, asyncio.CancelledError) as exc:
+ errors.append(exc)
+ else:
+ errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
+ if errors:
+ if len(errors) == 1:
+ raise errors[0]
+ else:
+ raise CleanupError("Multiple errors on cleanup stage", errors)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_exceptions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_exceptions.py"
new file mode 100644
index 0000000..ee2c1e7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_exceptions.py"
@@ -0,0 +1,452 @@
+import warnings
+from typing import Any, Dict, Iterable, List, Optional, Set # noqa
+
+from yarl import URL
+
+from .typedefs import LooseHeaders, StrOrURL
+from .web_response import Response
+
# Explicit public API of this module: the abstract bases plus one class
# per HTTP status code, grouped by status family.
__all__ = (
    "HTTPException",
    "HTTPError",
    "HTTPRedirection",
    "HTTPSuccessful",
    "HTTPOk",
    "HTTPCreated",
    "HTTPAccepted",
    "HTTPNonAuthoritativeInformation",
    "HTTPNoContent",
    "HTTPResetContent",
    "HTTPPartialContent",
    "HTTPMove",
    "HTTPMultipleChoices",
    "HTTPMovedPermanently",
    "HTTPFound",
    "HTTPSeeOther",
    "HTTPNotModified",
    "HTTPUseProxy",
    "HTTPTemporaryRedirect",
    "HTTPPermanentRedirect",
    "HTTPClientError",
    "HTTPBadRequest",
    "HTTPUnauthorized",
    "HTTPPaymentRequired",
    "HTTPForbidden",
    "HTTPNotFound",
    "HTTPMethodNotAllowed",
    "HTTPNotAcceptable",
    "HTTPProxyAuthenticationRequired",
    "HTTPRequestTimeout",
    "HTTPConflict",
    "HTTPGone",
    "HTTPLengthRequired",
    "HTTPPreconditionFailed",
    "HTTPRequestEntityTooLarge",
    "HTTPRequestURITooLong",
    "HTTPUnsupportedMediaType",
    "HTTPRequestRangeNotSatisfiable",
    "HTTPExpectationFailed",
    "HTTPMisdirectedRequest",
    "HTTPUnprocessableEntity",
    "HTTPFailedDependency",
    "HTTPUpgradeRequired",
    "HTTPPreconditionRequired",
    "HTTPTooManyRequests",
    "HTTPRequestHeaderFieldsTooLarge",
    "HTTPUnavailableForLegalReasons",
    "HTTPServerError",
    "HTTPInternalServerError",
    "HTTPNotImplemented",
    "HTTPBadGateway",
    "HTTPServiceUnavailable",
    "HTTPGatewayTimeout",
    "HTTPVersionNotSupported",
    "HTTPVariantAlsoNegotiates",
    "HTTPInsufficientStorage",
    "HTTPNotExtended",
    "HTTPNetworkAuthenticationRequired",
)
+
+
class NotAppKeyWarning(UserWarning):
    """Emitted when an Application is keyed with a plain value rather than an AppKey."""
+
+
+############################################################
+# HTTP Exceptions
+############################################################
+
+
class HTTPException(Response, Exception):
    """Base class for HTTP exceptions that are also complete responses.

    Instances can either be returned from a handler or raised; concrete
    subclasses supply the real ``status_code``.
    """

    # You should set in subclasses:
    # status = 200

    # -1 marks this base class as abstract; every subclass overrides it.
    status_code = -1
    # True for statuses that must not carry a body (e.g. 204, 304 below).
    empty_body = False

    # Marker attribute used to recognize web exceptions.
    __http_exception__ = True

    def __init__(
        self,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        if body is not None:
            warnings.warn(
                "body argument is deprecated for http web exceptions",
                DeprecationWarning,
            )
        # Initialize the Response side first so status/reason/body exist...
        Response.__init__(
            self,
            status=self.status_code,
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        # ...then the Exception side, so str(exc) shows the reason phrase.
        Exception.__init__(self, self.reason)
        # Give non-empty responses a default "<status>: <reason>" body.
        if self.body is None and not self.empty_body:
            self.text = f"{self.status}: {self.reason}"

    def __bool__(self) -> bool:
        # An exception/response instance is always truthy.
        return True
+
+
class HTTPError(HTTPException):
    """Base class for exceptions with status codes in the 400s and 500s."""


class HTTPRedirection(HTTPException):
    """Base class for exceptions with status codes in the 300s."""


class HTTPSuccessful(HTTPException):
    """Base class for exceptions with status codes in the 200s."""
+
+
class HTTPOk(HTTPSuccessful):
    # 200 OK
    status_code = 200


class HTTPCreated(HTTPSuccessful):
    # 201 Created
    status_code = 201


class HTTPAccepted(HTTPSuccessful):
    # 202 Accepted
    status_code = 202


class HTTPNonAuthoritativeInformation(HTTPSuccessful):
    # 203 Non-Authoritative Information
    status_code = 203


class HTTPNoContent(HTTPSuccessful):
    # 204 No Content -- must not carry a response body.
    status_code = 204
    empty_body = True


class HTTPResetContent(HTTPSuccessful):
    # 205 Reset Content -- must not carry a response body.
    status_code = 205
    empty_body = True


class HTTPPartialContent(HTTPSuccessful):
    # 206 Partial Content
    status_code = 206
+
+
+############################################################
+# 3xx redirection
+############################################################
+
+
class HTTPMove(HTTPRedirection):
    """Base class for redirects that carry a mandatory Location header."""

    def __init__(
        self,
        location: StrOrURL,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        # Validate before building the response: a falsy location is an error.
        if not location:
            raise ValueError("HTTP redirects need a location to redirect to.")
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        # Normalize via URL() so str and URL inputs serialize consistently.
        self.headers["Location"] = str(URL(location))
        # The original (unnormalized) location is kept for callers.
        self.location = location
+
+
class HTTPMultipleChoices(HTTPMove):
    # 300 Multiple Choices
    status_code = 300


class HTTPMovedPermanently(HTTPMove):
    # 301 Moved Permanently
    status_code = 301


class HTTPFound(HTTPMove):
    # 302 Found
    status_code = 302


# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(HTTPMove):
    # 303 See Other
    status_code = 303


class HTTPNotModified(HTTPRedirection):
    # FIXME: this should include a date or etag header
    # 304 Not Modified -- must not carry a response body.
    status_code = 304
    empty_body = True


class HTTPUseProxy(HTTPMove):
    # Not a move, but looks a little like one
    # 305 Use Proxy
    status_code = 305


class HTTPTemporaryRedirect(HTTPMove):
    # 307 Temporary Redirect
    status_code = 307


class HTTPPermanentRedirect(HTTPMove):
    # 308 Permanent Redirect
    status_code = 308
+
+
+############################################################
+# 4xx client error
+############################################################
+
+
class HTTPClientError(HTTPError):
    """Base class for 4xx client error exceptions."""

    pass


class HTTPBadRequest(HTTPClientError):
    # 400 Bad Request
    status_code = 400


class HTTPUnauthorized(HTTPClientError):
    # 401 Unauthorized
    status_code = 401


class HTTPPaymentRequired(HTTPClientError):
    # 402 Payment Required
    status_code = 402


class HTTPForbidden(HTTPClientError):
    # 403 Forbidden
    status_code = 403


class HTTPNotFound(HTTPClientError):
    # 404 Not Found
    status_code = 404
+
+
class HTTPMethodNotAllowed(HTTPClientError):
    """405 Method Not Allowed; advertises the permitted methods via Allow."""

    status_code = 405

    def __init__(
        self,
        method: str,
        allowed_methods: Iterable[str],
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        # Sorted so the Allow header value is deterministic.
        allow = ",".join(sorted(allowed_methods))
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self.headers["Allow"] = allow
        self.allowed_methods: Set[str] = set(allowed_methods)
        # Uppercased copy of the rejected request method.
        self.method = method.upper()
+
+
class HTTPNotAcceptable(HTTPClientError):
    # 406 Not Acceptable
    status_code = 406


class HTTPProxyAuthenticationRequired(HTTPClientError):
    # 407 Proxy Authentication Required
    status_code = 407


class HTTPRequestTimeout(HTTPClientError):
    # 408 Request Timeout
    status_code = 408


class HTTPConflict(HTTPClientError):
    # 409 Conflict
    status_code = 409


class HTTPGone(HTTPClientError):
    # 410 Gone
    status_code = 410


class HTTPLengthRequired(HTTPClientError):
    # 411 Length Required
    status_code = 411


class HTTPPreconditionFailed(HTTPClientError):
    # 412 Precondition Failed
    status_code = 412
+
+
class HTTPRequestEntityTooLarge(HTTPClientError):
    """413 Request Entity Too Large.

    The default response text reports both the configured limit and the
    offending body size; an explicit ``text`` keyword overrides it.
    """

    status_code = 413

    def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
        if "text" not in kwargs:
            kwargs["text"] = (
                f"Maximum request body size {max_size} exceeded, "
                f"actual body size {actual_size}"
            )
        super().__init__(**kwargs)
+
+
class HTTPRequestURITooLong(HTTPClientError):
    # 414 URI Too Long
    status_code = 414


class HTTPUnsupportedMediaType(HTTPClientError):
    # 415 Unsupported Media Type
    status_code = 415


class HTTPRequestRangeNotSatisfiable(HTTPClientError):
    # 416 Range Not Satisfiable
    status_code = 416


class HTTPExpectationFailed(HTTPClientError):
    # 417 Expectation Failed
    status_code = 417


class HTTPMisdirectedRequest(HTTPClientError):
    # 421 Misdirected Request
    status_code = 421


class HTTPUnprocessableEntity(HTTPClientError):
    # 422 Unprocessable Entity
    status_code = 422


class HTTPFailedDependency(HTTPClientError):
    # 424 Failed Dependency
    status_code = 424


class HTTPUpgradeRequired(HTTPClientError):
    # 426 Upgrade Required
    status_code = 426


class HTTPPreconditionRequired(HTTPClientError):
    # 428 Precondition Required
    status_code = 428


class HTTPTooManyRequests(HTTPClientError):
    # 429 Too Many Requests
    status_code = 429


class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
    # 431 Request Header Fields Too Large
    status_code = 431
+
+
class HTTPUnavailableForLegalReasons(HTTPClientError):
    """451 Unavailable For Legal Reasons.

    When *link* is given it points at the blocking resource and is
    exposed through a ``Link: <...>; rel="blocked-by"`` header.
    """

    status_code = 451

    def __init__(
        self,
        link: Optional[StrOrURL],
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        # Falsy links (None, empty string) leave the Link header unset.
        self._link = None
        if link:
            self._link = URL(link)
            self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"'

    @property
    def link(self) -> Optional[URL]:
        """The blocking resource URL, or None when not provided."""
        return self._link
+
+
############################################################
# 5xx Server Error
############################################################
# Response status codes beginning with the digit "5" indicate cases in
# which the server is aware that it has erred or is incapable of
# performing the request. Except when responding to a HEAD request, the
# server SHOULD include an entity containing an explanation of the error
# situation, and whether it is a temporary or permanent condition. User
# agents SHOULD display any included entity to the user. These response
# codes are applicable to any request method.


class HTTPServerError(HTTPError):
    """Base class for 5xx server error exceptions."""

    pass


class HTTPInternalServerError(HTTPServerError):
    # 500 Internal Server Error
    status_code = 500


class HTTPNotImplemented(HTTPServerError):
    # 501 Not Implemented
    status_code = 501


class HTTPBadGateway(HTTPServerError):
    # 502 Bad Gateway
    status_code = 502


class HTTPServiceUnavailable(HTTPServerError):
    # 503 Service Unavailable
    status_code = 503


class HTTPGatewayTimeout(HTTPServerError):
    # 504 Gateway Timeout
    status_code = 504


class HTTPVersionNotSupported(HTTPServerError):
    # 505 HTTP Version Not Supported
    status_code = 505


class HTTPVariantAlsoNegotiates(HTTPServerError):
    # 506 Variant Also Negotiates
    status_code = 506


class HTTPInsufficientStorage(HTTPServerError):
    # 507 Insufficient Storage
    status_code = 507


class HTTPNotExtended(HTTPServerError):
    # 510 Not Extended
    status_code = 510


class HTTPNetworkAuthenticationRequired(HTTPServerError):
    # 511 Network Authentication Required
    status_code = 511
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_fileresponse.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_fileresponse.py"
new file mode 100644
index 0000000..26484b9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_fileresponse.py"
@@ -0,0 +1,418 @@
+import asyncio
+import io
+import os
+import pathlib
+import sys
+from contextlib import suppress
+from enum import Enum, auto
+from mimetypes import MimeTypes
+from stat import S_ISREG
+from types import MappingProxyType
+from typing import ( # noqa
+ IO,
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Final,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+ cast,
+)
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import ETAG_ANY, ETag, must_be_empty_body
+from .typedefs import LooseHeaders, PathLike
+from .web_exceptions import (
+ HTTPForbidden,
+ HTTPNotFound,
+ HTTPNotModified,
+ HTTPPartialContent,
+ HTTPPreconditionFailed,
+ HTTPRequestRangeNotSatisfiable,
+)
+from .web_response import StreamResponse
+
+__all__ = ("FileResponse",)
+
+if TYPE_CHECKING:
+ from .web_request import BaseRequest
+
+
# Optional async callback type invoked with each chunk of body sent.
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]


# Escape hatch: setting AIOHTTP_NOSENDFILE forces the chunked fallback
# path instead of loop.sendfile().
NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))

# Private MimeTypes instance; customized below without mutating the
# global ``mimetypes`` module state.
CONTENT_TYPES: Final[MimeTypes] = MimeTypes()

# File extension to IANA encodings map that will be checked in the order defined.
ENCODING_EXTENSIONS = MappingProxyType(
    {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")}
)

# Used when no MIME type can be guessed for the served file.
FALLBACK_CONTENT_TYPE = "application/octet-stream"

# Provide additional MIME type/extension pairs to be recognized.
# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only
ADDITIONAL_CONTENT_TYPES = MappingProxyType(
    {
        "application/gzip": ".gz",
        "application/x-brotli": ".br",
        "application/x-bzip2": ".bz2",
        "application/x-compress": ".Z",
        "application/x-xz": ".xz",
    }
)


class _FileResponseResult(Enum):
    """The result of the file response."""

    SEND_FILE = auto()  # Ie a regular file to send
    NOT_ACCEPTABLE = auto()  # Ie a socket, or non-regular file
    PRE_CONDITION_FAILED = auto()  # Ie If-Match or If-None-Match failed
    NOT_MODIFIED = auto()  # 304 Not Modified


# Add custom pairs and clear the encodings map so guess_type ignores them.
CONTENT_TYPES.encodings_map.clear()
for content_type, extension in ADDITIONAL_CONTENT_TYPES.items():
    CONTENT_TYPES.add_type(content_type, extension)


# Strong references to pending file-close futures so they are not
# garbage collected before completion (see FileResponse.prepare).
_CLOSE_FUTURES: Set[asyncio.Future[None]] = set()
+
+
class FileResponse(StreamResponse):
    """A response object used to send files.

    Supports ``loop.sendfile()`` acceleration with a chunked fallback,
    conditional requests (If-Match/If-None-Match/If-(Un)Modified-Since)
    and byte-range requests.
    """

    def __init__(
        self,
        path: PathLike,
        chunk_size: int = 256 * 1024,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        """Initialize the response for the file at *path*.

        *chunk_size* controls how much data is read per iteration when
        the chunked fallback (rather than ``loop.sendfile``) is used.
        """
        super().__init__(status=status, reason=reason, headers=headers)

        self._path = pathlib.Path(path)
        self._chunk_size = chunk_size

    def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes:
        # Runs in an executor: position the file, then read one chunk.
        fobj.seek(offset)
        return fobj.read(chunk_size)  # type: ignore[no-any-return]

    async def _sendfile_fallback(
        self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        # To keep memory usage low, fobj is transferred in chunks
        # controlled by the constructor's chunk_size argument.

        chunk_size = self._chunk_size
        loop = asyncio.get_event_loop()
        # NOTE(review): the first read is a full chunk_size even when
        # count is smaller; presumably the writer enforces the declared
        # Content-Length -- confirm before relying on exact byte counts.
        chunk = await loop.run_in_executor(
            None, self._seek_and_read, fobj, offset, chunk_size
        )
        while chunk:
            await writer.write(chunk)
            count = count - chunk_size
            if count <= 0:
                break
            chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))

        await writer.drain()
        return writer

    async def _sendfile(
        self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        """Send *count* bytes of *fobj* starting at *offset*.

        Prefers the event loop's native sendfile; falls back to chunked
        writes when disabled, when compression is active, or when the
        loop does not implement sendfile.
        """
        writer = await super().prepare(request)
        assert writer is not None

        if NOSENDFILE or self.compression:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        loop = request._loop
        transport = request.transport
        assert transport is not None

        try:
            await loop.sendfile(transport, fobj, offset, count)
        except NotImplementedError:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        await super().write_eof()
        return writer

    @staticmethod
    def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool:
        """Return True if *etag_value* matches any entry of *etags*.

        A single ``*`` entry matches everything; weak tags are only
        considered when *weak* is True.
        """
        if len(etags) == 1 and etags[0].value == ETAG_ANY:
            return True
        return any(
            etag.value == etag_value for etag in etags if weak or not etag.is_weak
        )

    async def _not_modified(
        self, request: "BaseRequest", etag_value: str, last_modified: float
    ) -> Optional[AbstractStreamWriter]:
        """Prepare a 304 response carrying the validators but no body."""
        self.set_status(HTTPNotModified.status_code)
        self._length_check = False
        self.etag = etag_value
        self.last_modified = last_modified
        # Delete any Content-Length headers provided by user. HTTP 304
        # should always have empty response body
        return await super().prepare(request)

    async def _precondition_failed(
        self, request: "BaseRequest"
    ) -> Optional[AbstractStreamWriter]:
        """Prepare an empty 412 response."""
        self.set_status(HTTPPreconditionFailed.status_code)
        self.content_length = 0
        return await super().prepare(request)

    def _make_response(
        self, request: "BaseRequest", accept_encoding: str
    ) -> Tuple[
        _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str]
    ]:
        """Return the response result, io object, stat result, and encoding.

        If an uncompressed file is returned, the encoding is set to
        :py:data:`None`.

        This method should be called from a thread executor
        since it calls os.stat which may block.
        """
        file_path, st, file_encoding = self._get_file_path_stat_encoding(
            accept_encoding
        )
        if not file_path:
            # Non-regular file (socket, device, ...): refuse to serve it.
            return _FileResponseResult.NOT_ACCEPTABLE, None, st, None

        # Validator derived from mtime (ns) and size, both in hex.
        etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"

        # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2
        if (ifmatch := request.if_match) is not None and not self._etag_match(
            etag_value, ifmatch, weak=False
        ):
            return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding

        if (
            (unmodsince := request.if_unmodified_since) is not None
            and ifmatch is None
            and st.st_mtime > unmodsince.timestamp()
        ):
            return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding

        # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2
        if (ifnonematch := request.if_none_match) is not None and self._etag_match(
            etag_value, ifnonematch, weak=True
        ):
            return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding

        if (
            (modsince := request.if_modified_since) is not None
            and ifnonematch is None
            and st.st_mtime <= modsince.timestamp()
        ):
            return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding

        fobj = file_path.open("rb")
        with suppress(OSError):
            # fstat() may not be available on all platforms
            # Once we open the file, we want the fstat() to ensure
            # the file has not changed between the first stat()
            # and the open().
            st = os.stat(fobj.fileno())
        return _FileResponseResult.SEND_FILE, fobj, st, file_encoding

    def _get_file_path_stat_encoding(
        self, accept_encoding: str
    ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]:
        """Pick the file variant to serve.

        Prefers a pre-compressed sibling (``.br``/``.gz``) accepted by
        the client; returns ``(path, stat, encoding)`` where *path* is
        None if the target is not a regular file.
        """
        file_path = self._path
        for file_extension, file_encoding in ENCODING_EXTENSIONS.items():
            if file_encoding not in accept_encoding:
                continue

            compressed_path = file_path.with_suffix(file_path.suffix + file_extension)
            with suppress(OSError):
                # Do not follow symlinks and ignore any non-regular files.
                st = compressed_path.lstat()
                if S_ISREG(st.st_mode):
                    return compressed_path, st, file_encoding

        # Fallback to the uncompressed file
        st = file_path.stat()
        return file_path if S_ISREG(st.st_mode) else None, st, None

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        """Stat/open the file in an executor and dispatch on the result.

        Produces 403 (forbidden or special file), 404 (missing), 412,
        304, or sends the file body.
        """
        loop = asyncio.get_running_loop()
        # Encoding comparisons should be case-insensitive
        # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
        accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
        try:
            response_result, fobj, st, file_encoding = await loop.run_in_executor(
                None, self._make_response, request, accept_encoding
            )
        except PermissionError:
            self.set_status(HTTPForbidden.status_code)
            return await super().prepare(request)
        except OSError:
            # Most likely to be FileNotFoundError or OSError for circular
            # symlinks in python >= 3.13, so respond with 404.
            self.set_status(HTTPNotFound.status_code)
            return await super().prepare(request)

        # Forbid special files like sockets, pipes, devices, etc.
        if response_result is _FileResponseResult.NOT_ACCEPTABLE:
            self.set_status(HTTPForbidden.status_code)
            return await super().prepare(request)

        if response_result is _FileResponseResult.PRE_CONDITION_FAILED:
            return await self._precondition_failed(request)

        if response_result is _FileResponseResult.NOT_MODIFIED:
            etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
            last_modified = st.st_mtime
            return await self._not_modified(request, etag_value, last_modified)

        assert fobj is not None
        try:
            return await self._prepare_open_file(request, fobj, st, file_encoding)
        finally:
            # We do not await here because we do not want to wait
            # for the executor to finish before returning the response
            # so the connection can begin servicing another request
            # as soon as possible.
            close_future = loop.run_in_executor(None, fobj.close)
            # Hold a strong reference to the future to prevent it from being
            # garbage collected before it completes.
            _CLOSE_FUTURES.add(close_future)
            close_future.add_done_callback(_CLOSE_FUTURES.remove)

    async def _prepare_open_file(
        self,
        request: "BaseRequest",
        fobj: io.BufferedReader,
        st: os.stat_result,
        file_encoding: Optional[str],
    ) -> Optional[AbstractStreamWriter]:
        """Send the opened file.

        Applies Range/If-Range processing, sets content headers and the
        ETag/Last-Modified validators, then delegates to ``_sendfile``.
        """
        status = self._status
        file_size: int = st.st_size
        file_mtime: float = st.st_mtime
        count: int = file_size
        start: Optional[int] = None

        if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp():
            # If-Range header check:
            # condition = cached date >= last modification date
            # return 206 if True else 200.
            # if False:
            #   Range header would not be processed, return 200
            # if True but Range header missing
            #   return 200
            try:
                rng = request.http_range
                start = rng.start
                end: Optional[int] = rng.stop
            except ValueError:
                # https://tools.ietf.org/html/rfc7233:
                # A server generating a 416 (Range Not Satisfiable) response to
                # a byte-range request SHOULD send a Content-Range header field
                # with an unsatisfied-range value.
                # The complete-length in a 416 response indicates the current
                # length of the selected representation.
                #
                # Will do the same below. Many servers ignore this and do not
                # send a Content-Range header with HTTP 416
                self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                return await super().prepare(request)

            # If a range request has been made, convert start, end slice
            # notation into file pointer offset and count
            if start is not None:
                if start < 0 and end is None:  # return tail of file
                    start += file_size
                    if start < 0:
                        # if Range:bytes=-1000 in request header but file size
                        # is only 200, there would be trouble without this
                        start = 0
                    count = file_size - start
                else:
                    # rfc7233:If the last-byte-pos value is
                    # absent, or if the value is greater than or equal to
                    # the current length of the representation data,
                    # the byte range is interpreted as the remainder
                    # of the representation (i.e., the server replaces the
                    # value of last-byte-pos with a value that is one less than
                    # the current length of the selected representation).
                    count = (
                        min(end if end is not None else file_size, file_size) - start
                    )

                if start >= file_size:
                    # HTTP 416 should be returned in this case.
                    #
                    # According to https://tools.ietf.org/html/rfc7233:
                    # If a valid byte-range-set includes at least one
                    # byte-range-spec with a first-byte-pos that is less than
                    # the current length of the representation, or at least one
                    # suffix-byte-range-spec with a non-zero suffix-length,
                    # then the byte-range-set is satisfiable. Otherwise, the
                    # byte-range-set is unsatisfiable.
                    self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                    self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                    return await super().prepare(request)

                status = HTTPPartialContent.status_code
                # Even though you are sending the whole file, you should still
                # return a HTTP 206 for a Range request.
                self.set_status(status)

        # If the Content-Type header is not already set, guess it based on the
        # extension of the request path. The encoding returned by guess_type
        # can be ignored since the map was cleared above.
        if hdrs.CONTENT_TYPE not in self._headers:
            if sys.version_info >= (3, 13):
                guesser = CONTENT_TYPES.guess_file_type
            else:
                guesser = CONTENT_TYPES.guess_type
            self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE

        if file_encoding:
            self._headers[hdrs.CONTENT_ENCODING] = file_encoding
            self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
            # Disable compression if we are already sending
            # a compressed file since we don't want to double
            # compress.
            self._compression = False

        self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}"
        self.last_modified = file_mtime
        self.content_length = count

        self._headers[hdrs.ACCEPT_RANGES] = "bytes"

        if status == HTTPPartialContent.status_code:
            real_start = start
            assert real_start is not None
            self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
                real_start, real_start + count - 1, file_size
            )

        # If we are sending 0 bytes calling sendfile() will throw a ValueError
        if count == 0 or must_be_empty_body(request.method, status):
            return await super().prepare(request)

        # be aware that start could be None or int=0 here.
        offset = start or 0

        return await self._sendfile(request, fobj, offset, count)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_log.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_log.py"
new file mode 100644
index 0000000..d5ea2be
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_log.py"
@@ -0,0 +1,216 @@
+import datetime
+import functools
+import logging
+import os
+import re
+import time as time_mod
+from collections import namedtuple
+from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa
+
+from .abc import AbstractAccessLogger
+from .web_request import BaseRequest
+from .web_response import StreamResponse
+
# (key, method) pair produced by AccessLogger.compile_format: ``key``
# names the log atom (a str, or a (map_key, header_name) tuple for
# %{..}x atoms) and ``method`` formats one value for it.
KeyMethod = namedtuple("KeyMethod", "key method")
+
+
class AccessLogger(AbstractAccessLogger):
    """Helper object to log access.

    Usage:
        log = logging.getLogger("spam")
        log_format = "%a %{User-Agent}i"
        access_logger = AccessLogger(log, log_format)
        access_logger.log(request, response, time)

    Format:
        %%  The percent sign
        %a  Remote IP-address (IP-address of proxy if using reverse proxy)
        %t  Time when the request was started to process
        %P  The process ID of the child that serviced the request
        %r  First line of request
        %s  Response status code
        %b  Size of response in bytes, including HTTP headers
        %T  Time taken to serve the request, in seconds
        %Tf Time taken to serve the request, in seconds with floating fraction
            in .06f format
        %D  Time taken to serve the request, in microseconds
        %{FOO}i  request.headers['FOO']
        %{FOO}o  response.headers['FOO']
        %{FOO}e  os.environ['FOO']

    """

    # Maps format atoms to the attribute name used as ``extra`` key.
    LOG_FORMAT_MAP = {
        "a": "remote_address",
        "t": "request_start_time",
        "P": "process_id",
        "r": "first_request_line",
        "s": "response_status",
        "b": "response_size",
        "T": "request_time",
        "Tf": "request_time_frac",
        "D": "request_time_micro",
        "i": "request_header",
        "o": "response_header",
    }

    LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
    # NOTE(review): the regex admits %O and %{FOO}e, but LOG_FORMAT_MAP has
    # no "O"/"e" entries and no _format_O/_format_e methods exist, so those
    # atoms would raise in compile_format -- confirm against upstream.
    FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
    CLEANUP_RE = re.compile(r"(%[^s])")
    # Compiled formats shared across instances, keyed by the format string.
    _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {}

    def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
        """Initialise the logger.

        logger is a logger object to be used for logging.
        log_format is a string with apache compatible log format description.

        """
        super().__init__(logger, log_format=log_format)

        _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format)
        if not _compiled_format:
            _compiled_format = self.compile_format(log_format)
            AccessLogger._FORMAT_CACHE[log_format] = _compiled_format

        self._log_format, self._methods = _compiled_format

    def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
        """Translate log_format into form usable by modulo formatting

        All known atoms will be replaced with %s
        Also methods for formatting of those atoms will be added to
        _methods in appropriate order

        For example we have log_format = "%a %t"
        This format will be translated to "%s %s"
        Also contents of _methods will be
        [self._format_a, self._format_t]
        These method will be called and results will be passed
        to translated string format.

        Each _format_* method receive 'args' which is list of arguments
        given to self.log

        Exceptions are _format_e, _format_i and _format_o methods which
        also receive key name (by functools.partial)

        """
        # list of (key, method) tuples, we don't use an OrderedDict as users
        # can repeat the same key more than once
        methods = list()

        for atom in self.FORMAT_RE.findall(log_format):
            if atom[1] == "":
                # Plain atom like %a: look up its static formatter.
                format_key1 = self.LOG_FORMAT_MAP[atom[0]]
                m = getattr(AccessLogger, "_format_%s" % atom[0])
                key_method = KeyMethod(format_key1, m)
            else:
                # Parameterized atom like %{FOO}i: bind the key via partial.
                format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
                m = getattr(AccessLogger, "_format_%s" % atom[2])
                key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))

            methods.append(key_method)

        log_format = self.FORMAT_RE.sub(r"%s", log_format)
        # Escape any stray % not consumed above so modulo formatting is safe.
        log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
        return log_format, methods

    @staticmethod
    def _format_i(
        key: str, request: BaseRequest, response: StreamResponse, time: float
    ) -> str:
        if request is None:
            return "(no headers)"

        # suboptimal, make istr(key) once
        return request.headers.get(key, "-")

    @staticmethod
    def _format_o(
        key: str, request: BaseRequest, response: StreamResponse, time: float
    ) -> str:
        # suboptimal, make istr(key) once
        return response.headers.get(key, "-")

    @staticmethod
    def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
        if request is None:
            return "-"
        ip = request.remote
        return ip if ip is not None else "-"

    @staticmethod
    def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
        # NOTE(review): uses time.timezone (the non-DST offset), so the
        # logged offset ignores daylight saving -- confirm acceptable.
        tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone))
        now = datetime.datetime.now(tz)
        # Reconstruct the request start by subtracting the elapsed time.
        start_time = now - datetime.timedelta(seconds=time)
        return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]")

    @staticmethod
    def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return "<%s>" % os.getpid()

    @staticmethod
    def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
        if request is None:
            return "-"
        return "{} {} HTTP/{}.{}".format(
            request.method,
            request.path_qs,
            request.version.major,
            request.version.minor,
        )

    @staticmethod
    def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
        return response.status

    @staticmethod
    def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
        return response.body_length

    @staticmethod
    def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return str(round(time))

    @staticmethod
    def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return "%06f" % time

    @staticmethod
    def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
        return str(round(time * 1000000))

    def _format_line(
        self, request: BaseRequest, response: StreamResponse, time: float
    ) -> Iterable[Tuple[Any, Any]]:
        # Returns (key, formatted-value) pairs; key is a str or a
        # (map_key, header_name) tuple, value is the formatter's output.
        return [(key, method(request, response, time)) for key, method in self._methods]

    @property
    def enabled(self) -> bool:
        """Check if logger is enabled."""
        # Avoid formatting the log line if it will not be emitted.
        return self.logger.isEnabledFor(logging.INFO)

    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Format one access-log line and emit it at INFO level."""
        try:
            fmt_info = self._format_line(request, response, time)

            values = list()
            extra = dict()
            for key, value in fmt_info:
                values.append(value)

                if key.__class__ is str:
                    extra[key] = value
                else:
                    # Tuple keys (from %{..}x atoms) are nested one level
                    # deep in ``extra``: extra[map_key][header_name].
                    k1, k2 = key  # type: ignore[misc]
                    dct = extra.get(k1, {})  # type: ignore[var-annotated,has-type]
                    dct[k2] = value  # type: ignore[index,has-type]
                    extra[k1] = dct  # type: ignore[has-type,assignment]

            self.logger.info(self._log_format % tuple(values), extra=extra)
        except Exception:
            # Logging must never propagate into request handling.
            self.logger.exception("Error in logging")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_middlewares.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_middlewares.py"
new file mode 100644
index 0000000..2f1f5f5
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_middlewares.py"
@@ -0,0 +1,121 @@
+import re
+from typing import TYPE_CHECKING, Tuple, Type, TypeVar
+
+from .typedefs import Handler, Middleware
+from .web_exceptions import HTTPMove, HTTPPermanentRedirect
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_urldispatcher import SystemRoute
+
# Public names exported by this module.
__all__ = (
    "middleware",
    "normalize_path_middleware",
)

# Imported only while type checking (TYPE_CHECKING is False at runtime).
if TYPE_CHECKING:
    from .web_app import Application

# Type variable standing in for the callable decorated by @middleware.
_Func = TypeVar("_Func")
+
+
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
    """Try resolving *request* with *path* substituted.

    Returns ``(True, cloned_request)`` when the rewritten path matches a
    route, otherwise ``(False, original_request)``.
    """
    candidate = request.clone(rel_url=path)

    resolved = await request.app.router.resolve(candidate)
    candidate._match_info = resolved

    if resolved.http_exception is not None:
        # Rewritten path still does not resolve; keep the original request.
        return False, request

    return True, candidate
+
+
def middleware(f: "_Func") -> "_Func":
    """Mark *f* as a new-style (version 1) middleware and return it unchanged."""
    setattr(f, "__middleware_version__", 1)
    return f
+
+
def normalize_path_middleware(
    *,
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
) -> Middleware:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
      - Add or remove a trailing slash to the path.
      - Double slashes are replaced by one.

    The middleware returns as soon as it finds a path that resolves
    correctly. The order if both merge and append/remove are enabled is
      1) merge slashes
      2) append/remove slash
      3) both merge slashes and append/remove slash.
    If the path resolves with at least one of those conditions, it will
    redirect to the new path.

    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error

    If `append_slash` is `True` the middleware will append a slash when
    needed. If a resource is defined with trailing slash and the request
    comes without it, it will append it automatically.

    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
    the middleware will remove trailing slashes and redirect if the resource
    is defined

    If merge_slashes is True, merge multiple consecutive slashes in the
    path into one.
    """
    # NOTE: assert is stripped under ``python -O``; misconfiguration would
    # then go undetected (kept as-is for interface compatibility).
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        # Only attempt a rewrite when normal routing failed (the match fell
        # through to a SystemRoute, i.e. no regular resource matched).
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            # Split the query string off so it can be re-attached after redirect.
            if "?" in request.raw_path:
                path, query = request.raw_path.split("?", 1)
                query = "?" + query
            else:
                query = ""
                path = request.raw_path

            # Candidate paths, in the documented priority order.
            if merge_slashes:
                paths_to_check.append(re.sub("//+", "/", path))
            if append_slash and not request.path.endswith("/"):
                paths_to_check.append(path + "/")
            if remove_slash and request.path.endswith("/"):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(re.sub("//+", "/", path + "/"))
            if merge_slashes and remove_slash:
                merged_slashes = re.sub("//+", "/", path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                # Collapse leading slashes so the redirect target cannot be an
                # absolute //host URL (open-redirect fix).
                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
                resolves, request = await _check_request_resolves(request, path)
                if resolves:
                    raise redirect_class(request.raw_path + query)

        return await handler(request)

    return impl
+
+
def _fix_request_current_app(app: "Application") -> Middleware:
    """Build a middleware that pins ``match_info.current_app`` to *app*.

    The previous value is restored once the handler finishes, even on error.
    """

    @middleware
    async def _set_current_app(request: Request, handler: Handler) -> StreamResponse:
        match_info = request.match_info
        saved_app = match_info.current_app
        match_info.current_app = app
        try:
            return await handler(request)
        finally:
            match_info.current_app = saved_app

    return _set_current_app
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_protocol.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_protocol.py"
new file mode 100644
index 0000000..1bd344a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_protocol.py"
@@ -0,0 +1,792 @@
+import asyncio
+import asyncio.streams
+import sys
+import traceback
+import warnings
+from collections import deque
+from contextlib import suppress
+from html import escape as html_escape
+from http import HTTPStatus
+from logging import Logger
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Deque,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+import yarl
+from propcache import under_cached_property
+
+from .abc import AbstractAccessLogger, AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .helpers import ceil_timeout
+from .http import (
+ HttpProcessingError,
+ HttpRequestParser,
+ HttpVersion10,
+ RawRequestMessage,
+ StreamWriter,
+)
+from .http_exceptions import BadHttpMethod
+from .log import access_logger, server_logger
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .tcp_helpers import tcp_keepalive
+from .web_exceptions import HTTPException, HTTPInternalServerError
+from .web_log import AccessLogger
+from .web_request import BaseRequest
+from .web_response import Response, StreamResponse
+
+__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
+
+if TYPE_CHECKING:
+ import ssl
+
+ from .web_server import Server
+
+
# Signature of the factory used to build a BaseRequest for every parsed message.
_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

# Signature of the application-facing request handler coroutine.
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]

# Placeholder message used when request parsing failed; start() swaps it in
# together with the error handler built by _make_error_handler().
ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)
+
+
class RequestPayloadError(Exception):
    """Payload parsing error."""


class PayloadAccessError(Exception):
    """Payload was accessed after response was sent."""


# Shared singleton set on the request payload once the response is finished
# (see start()), so no new exception object is allocated per request.
_PAYLOAD_ACCESS_ERROR = PayloadAccessError()
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    # Details of an HTTP parsing failure, queued in place of a parsed message.
    status: int
    exc: BaseException
    message: str


# Queue item produced by data_received(): a parsed message or parse-error
# info, always paired with its payload stream.
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
+
+
class RequestHandler(BaseProtocol):
    """HTTP protocol implementation.

    RequestHandler handles incoming HTTP request. It reads request line,
    request headers and request payload and calls handle_request() method.
    By default it always returns with 404 response.

    RequestHandler handles errors in incoming request, like bad
    status line, bad headers or incomplete payload. If any error occurs,
    connection gets closed.

    keepalive_timeout -- number of seconds before closing
                         keep-alive connection

    tcp_keepalive -- TCP keep-alive is on, default is on

    debug -- enable debug mode

    logger -- custom logger object

    access_log_class -- custom class for access_logger

    access_log -- custom logging object

    access_log_format -- access log format string

    loop -- Optional event loop

    max_line_size -- Optional maximum header line size

    max_field_size -- Optional maximum header field size

    max_headers -- Optional maximum header size

    timeout_ceil_threshold -- Optional value to specify
                              threshold to ceil() timeout
                              values

    """

    # Keep in sync with the attributes assigned in __init__; slots remove the
    # per-instance __dict__ for the many concurrent connection objects.
    __slots__ = (
        "_request_count",
        "_keepalive",
        "_manager",
        "_request_handler",
        "_request_factory",
        "_tcp_keepalive",
        "_next_keepalive_close_time",
        "_keepalive_handle",
        "_keepalive_timeout",
        "_lingering_time",
        "_messages",
        "_message_tail",
        "_handler_waiter",
        "_waiter",
        "_task_handler",
        "_upgrade",
        "_payload_parser",
        "_request_parser",
        "_reading_paused",
        "logger",
        "debug",
        "access_log",
        "access_logger",
        "_close",
        "_force_close",
        "_current_request",
        "_timeout_ceil_threshold",
        "_request_in_progress",
        "_logging_enabled",
        "_cache",
    )
+
    def __init__(
        self,
        manager: "Server",
        *,
        loop: asyncio.AbstractEventLoop,
        # Default should be high enough that it's likely longer than a reverse proxy.
        keepalive_timeout: float = 3630,
        tcp_keepalive: bool = True,
        logger: Logger = server_logger,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        access_log: Logger = access_logger,
        access_log_format: str = AccessLogger.LOG_FORMAT,
        debug: bool = False,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lingering_time: float = 10.0,
        read_bufsize: int = 2**16,
        auto_decompress: bool = True,
        timeout_ceil_threshold: float = 5,
    ):
        """Initialize protocol state for one client connection served by *manager*."""
        super().__init__(loop)

        # _request_count is the number of requests processed with the same connection.
        self._request_count = 0
        self._keepalive = False
        self._current_request: Optional[BaseRequest] = None
        self._manager: Optional[Server] = manager
        self._request_handler: Optional[_RequestHandler] = manager.request_handler
        self._request_factory: Optional[_RequestFactory] = manager.request_factory

        self._tcp_keepalive = tcp_keepalive
        # placeholder to be replaced on keepalive timeout setup
        self._next_keepalive_close_time = 0.0
        self._keepalive_handle: Optional[asyncio.Handle] = None
        self._keepalive_timeout = keepalive_timeout
        self._lingering_time = float(lingering_time)

        # Pipelined messages waiting for the handler loop in start().
        self._messages: Deque[_MsgType] = deque()
        self._message_tail = b""

        self._waiter: Optional[asyncio.Future[None]] = None
        self._handler_waiter: Optional[asyncio.Future[None]] = None
        self._task_handler: Optional[asyncio.Task[None]] = None

        self._upgrade = False
        self._payload_parser: Any = None
        self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
            self,
            loop,
            read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
            max_headers=max_headers,
            payload_exception=RequestPayloadError,
            auto_decompress=auto_decompress,
        )

        # Fall back to 5s if the provided threshold is not a number.
        self._timeout_ceil_threshold: float = 5
        try:
            self._timeout_ceil_threshold = float(timeout_ceil_threshold)
        except (TypeError, ValueError):
            pass

        self.logger = logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
                access_log, access_log_format
            )
            # Cached so the hot path can skip access logging cheaply.
            self._logging_enabled = self.access_logger.enabled
        else:
            self.access_logger = None
            self._logging_enabled = False

        self._close = False
        self._force_close = False
        self._request_in_progress = False
        self._cache: dict[str, Any] = {}
+
+ def __repr__(self) -> str:
+ return "<{} {}>".format(
+ self.__class__.__name__,
+ "connected" if self.transport is not None else "disconnected",
+ )
+
+ @under_cached_property
+ def ssl_context(self) -> Optional["ssl.SSLContext"]:
+ """Return SSLContext if available."""
+ return (
+ None
+ if self.transport is None
+ else self.transport.get_extra_info("sslcontext")
+ )
+
+ @under_cached_property
+ def peername(
+ self,
+ ) -> Optional[Union[str, Tuple[str, int, int, int], Tuple[str, int]]]:
+ """Return peername if available."""
+ return (
+ None
+ if self.transport is None
+ else self.transport.get_extra_info("peername")
+ )
+
    @property
    def keepalive_timeout(self) -> float:
        """Seconds an idle keep-alive connection stays open before being closed."""
        return self._keepalive_timeout
+
    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
        """Do worker process exit preparations.

        We need to clean up everything and stop accepting requests.
        It is especially important for keep-alive connections.

        Two phases, each bounded by *timeout*: first wait for the in-flight
        handler to finish gracefully, then cancel it and wait again.
        """
        self._force_close = True

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        # Wait for graceful handler completion
        if self._request_in_progress:
            # The future is only created when we are shutting
            # down while the handler is still processing a request
            # to avoid creating a future for every request.
            self._handler_waiter = self._loop.create_future()
            try:
                async with ceil_timeout(timeout):
                    await self._handler_waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._handler_waiter = None
                # Re-raise only if this shutdown task itself is being
                # cancelled (Python 3.11+ cancellation accounting).
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        # Then cancel handler and wait
        try:
            async with ceil_timeout(timeout):
                if self._current_request is not None:
                    self._current_request._cancel(asyncio.CancelledError())

                if self._task_handler is not None and not self._task_handler.done():
                    await asyncio.shield(self._task_handler)
        except (asyncio.CancelledError, asyncio.TimeoutError):
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise

        # force-close non-idle handler
        if self._task_handler is not None:
            self._task_handler.cancel()

        self.force_close()
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ super().connection_made(transport)
+
+ real_transport = cast(asyncio.Transport, transport)
+ if self._tcp_keepalive:
+ tcp_keepalive(real_transport)
+
+ assert self._manager is not None
+ self._manager.connection_made(self, real_transport)
+
+ loop = self._loop
+ if sys.version_info >= (3, 12):
+ task = asyncio.Task(self.start(), loop=loop, eager_start=True)
+ else:
+ task = loop.create_task(self.start())
+ self._task_handler = task
+
    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """Tear down all per-connection state when the transport drops.

        Order matters: the manager is notified first, then references are
        cleared so the handler task and parsers can be garbage collected.
        """
        if self._manager is None:
            return
        self._manager.connection_lost(self, exc)

        # Grab value before setting _manager to None.
        handler_cancellation = self._manager.handler_cancellation

        self.force_close()
        super().connection_lost(exc)
        self._manager = None
        self._request_factory = None
        self._request_handler = None
        self._request_parser = None

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._current_request is not None:
            if exc is None:
                exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        # Only cancel the in-flight handler if the server opted in.
        if handler_cancellation and self._task_handler is not None:
            self._task_handler.cancel()

        self._task_handler = None

        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
+
+ def set_parser(self, parser: Any) -> None:
+ # Actual type is WebReader
+ assert self._payload_parser is None
+
+ self._payload_parser = parser
+
+ if self._message_tail:
+ self._payload_parser.feed_data(self._message_tail)
+ self._message_tail = b""
+
    def eof_received(self) -> None:
        # Intentionally a no-op: returning None lets asyncio close the
        # transport on EOF (standard Protocol.eof_received semantics);
        # cleanup then happens in connection_lost().
        pass
+
    def data_received(self, data: bytes) -> None:
        """Feed raw bytes into the request parser, upgrade buffer, or payload parser.

        Three states: normal HTTP parsing, buffering after a protocol
        upgrade, or streaming the current request's payload.
        """
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                # Queue the parse error; start() will render a 400 for it.
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()
+
+ def keep_alive(self, val: bool) -> None:
+ """Set keep-alive connection mode.
+
+ :param bool val: new state.
+ """
+ self._keepalive = val
+ if self._keepalive_handle:
+ self._keepalive_handle.cancel()
+ self._keepalive_handle = None
+
+ def close(self) -> None:
+ """Close connection.
+
+ Stop accepting new pipelining messages and close
+ connection when handlers done processing messages.
+ """
+ self._close = True
+ if self._waiter:
+ self._waiter.cancel()
+
+ def force_close(self) -> None:
+ """Forcefully close connection."""
+ self._force_close = True
+ if self._waiter:
+ self._waiter.cancel()
+ if self.transport is not None:
+ self.transport.close()
+ self.transport = None
+
+ def log_access(
+ self, request: BaseRequest, response: StreamResponse, time: Optional[float]
+ ) -> None:
+ if self._logging_enabled and self.access_logger is not None:
+ if TYPE_CHECKING:
+ assert time is not None
+ self.access_logger.log(request, response, self._loop.time() - time)
+
+ def log_debug(self, *args: Any, **kw: Any) -> None:
+ if self.debug:
+ self.logger.debug(*args, **kw)
+
    def log_exception(self, *args: Any, **kw: Any) -> None:
        """Log an exception (with traceback) via the configured server logger."""
        self.logger.exception(*args, **kw)
+
+ def _process_keepalive(self) -> None:
+ self._keepalive_handle = None
+ if self._force_close or not self._keepalive:
+ return
+
+ loop = self._loop
+ now = loop.time()
+ close_time = self._next_keepalive_close_time
+ if now < close_time:
+ # Keep alive close check fired too early, reschedule
+ self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
+ return
+
+ # handler in idle state
+ if self._waiter and not self._waiter.done():
+ self.force_close()
+
    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: Optional[float],
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        """Run *request_handler* and finish the response.

        Returns ``(response, reset)`` where *reset* is True when the client
        disconnected prematurely. Every non-cancellation error is converted
        into an error response here.
        """
        self._request_in_progress = True
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            resp = exc
            resp, reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            resp, reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            resp = self.handle_error(request, 500, exc)
            resp, reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            resp, reset = await self.finish_response(request, resp, start_time)
        finally:
            self._request_in_progress = False
            # Unblock a concurrent shutdown() waiting for this handler.
            if self._handler_waiter is not None:
                self._handler_waiter.set_result(None)

        return resp, reset
+
    async def start(self) -> None:
        """Process incoming request.

        It reads request line, request headers and request payload, then
        calls handle_request() method. Subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. Connection is being closed always unless
        keep_alive(True) specified.
        """
        loop = self._loop
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            # time is only fetched if logging is enabled as otherwise
            # its thrown away and never used.
            start = loop.time() if self._logging_enabled else None

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            # Important don't hold a reference to the current task
            # as on traceback it will prevent the task from being
            # collected and will cause a memory leak.
            request = self._request_factory(
                message,
                payload,
                self,
                writer,
                self._task_handler or asyncio.current_task(loop),  # type: ignore[arg-type]
            )
            try:
                # a new task is used for copy context vars (#3406)
                coro = self._handle_request(request, start, request_handler)
                if sys.version_info >= (3, 12):
                    task = asyncio.Task(coro, loop=loop, eager_start=True)
                else:
                    task = loop.create_task(coro)
                try:
                    resp, reset = await task
                except ConnectionError:
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        try:
                            # Drain (and discard) the rest of the body for up
                            # to lingering_time seconds before giving up.
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()
                        except (asyncio.CancelledError, asyncio.TimeoutError):
                            if (
                                sys.version_info >= (3, 11)
                                and (t := asyncio.current_task())
                                and t.cancelling()
                            ):
                                raise

                    # if payload still uncompleted
                    if not payload.is_eof() and not self._force_close:
                        self.log_debug("Uncompleted request.")
                        self.close()

                    payload.set_exception(_PAYLOAD_ACCESS_ERROR)

            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection")
                self.force_close()
                raise
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            except BaseException:
                self.force_close()
                raise
            finally:
                request._task = None  # type: ignore[assignment] # Break reference cycle in case of exception
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")

            if self._keepalive and not self._close and not self._force_close:
                # start keep-alive timer
                close_time = loop.time() + keepalive_timeout
                self._next_keepalive_close_time = close_time
                if self._keepalive_handle is None:
                    self._keepalive_handle = loop.call_at(
                        close_time, self._process_keepalive
                    )
            else:
                break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()
+
    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: Optional[float]
    ) -> Tuple[StreamResponse, bool]:
        """Prepare the response and write_eof, then log access.

        This has to
        be called within the context of any exception so the access logger
        can get exception information. Returns True if the client disconnects
        prematurely.
        """
        request._finish()
        if self._request_parser is not None:
            self._request_parser.set_upgraded(False)
            self._upgrade = False
            # Replay bytes that arrived while the connection was upgraded.
            if self._message_tail:
                self._request_parser.feed_data(self._message_tail)
                self._message_tail = b""
        try:
            prepare_meth = resp.prepare
        except AttributeError:
            # Handler returned something that is not a response object;
            # substitute a 500 so the client still gets a valid reply.
            if resp is None:
                self.log_exception("Missing return statement on request handler")
            else:
                self.log_exception(
                    "Web-handler should return a response instance, "
                    "got {!r}".format(resp)
                )
            exc = HTTPInternalServerError()
            resp = Response(
                status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
            )
            prepare_meth = resp.prepare
        try:
            await prepare_meth(request)
            await resp.write_eof()
        except ConnectionError:
            self.log_access(request, resp, start_time)
            return resp, True

        self.log_access(request, resp, start_time)
        return resp, False
+
    def handle_error(
        self,
        request: BaseRequest,
        status: int = 500,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> StreamResponse:
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection.
        """
        if self._request_count == 1 and isinstance(exc, BadHttpMethod):
            # BadHttpMethod is common when a client sends non-HTTP
            # or encrypted traffic to an HTTP port. This is expected
            # to happen when connected to the public internet so we log
            # it at the debug level as to not fill logs with noise.
            self.logger.debug(
                "Error handling request from %s", request.remote, exc_info=exc
            )
        else:
            self.log_exception(
                "Error handling request from %s", request.remote, exc_info=exc
            )

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
            tb = None
            # Traceback details are exposed only in debug mode.
            if self.debug:
                with suppress(Exception):
                    tb = traceback.format_exc()

            if "text/html" in request.headers.get("Accept", ""):
                if tb:
                    tb = html_escape(tb)
                    msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
                message = (
                    "<html><head>"
                    "<title>{title}</title>"
                    "</head><body>\n<h1>{title}</h1>"
                    "\n{msg}\n</body></html>\n"
                ).format(title=title, msg=msg)
                ct = "text/html"
            else:
                if tb:
                    msg = tb
                message = title + "\n\n" + msg

        resp = Response(status=status, text=message, content_type=ct)
        resp.force_close()

        return resp
+
+ def _make_error_handler(
+ self, err_info: _ErrInfo
+ ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
+ async def handler(request: BaseRequest) -> StreamResponse:
+ return self.handle_error(
+ request, err_info.status, err_info.exc, err_info.message
+ )
+
+ return handler
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_request.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_request.py"
new file mode 100644
index 0000000..0eafcd6
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_request.py"
@@ -0,0 +1,914 @@
+import asyncio
+import datetime
+import io
+import re
+import socket
+import string
+import tempfile
+import types
+import warnings
+from types import MappingProxyType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Final,
+ Iterator,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Pattern,
+ Tuple,
+ Union,
+ cast,
+)
+from urllib.parse import parse_qsl
+
+import attr
+from multidict import (
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ MultiMapping,
+)
+from yarl import URL
+
+from . import hdrs
+from ._cookie_helpers import parse_cookie_header
+from .abc import AbstractStreamWriter
+from .helpers import (
+ _SENTINEL,
+ DEBUG,
+ ETAG_ANY,
+ LIST_QUOTED_ETAG_RE,
+ ChainMapProxy,
+ ETag,
+ HeadersMixin,
+ parse_http_date,
+ reify,
+ sentinel,
+ set_exception,
+)
+from .http_parser import RawRequestMessage
+from .http_writer import HttpVersion
+from .multipart import BodyPartReader, MultipartReader
+from .streams import EmptyStreamReader, StreamReader
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ JSONDecoder,
+ LooseHeaders,
+ RawHeaders,
+ StrOrURL,
+)
+from .web_exceptions import HTTPRequestEntityTooLarge
+from .web_response import StreamResponse
+
+__all__ = ("BaseRequest", "FileField", "Request")
+
+
+if TYPE_CHECKING:
+ from .web_app import Application
+ from .web_protocol import RequestHandler
+ from .web_urldispatcher import UrlMappingMatchInfo
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class FileField:
    """A single uploaded file from a multipart form POST."""

    name: str  # form field name
    filename: str  # filename as given in the part headers
    file: io.BufferedReader  # file object holding the uploaded content
    content_type: str  # Content-Type of the part
    headers: CIMultiDictProxy[str]  # complete headers of the part
+
+
# HTTP token / quoted-string grammar pieces used to parse Forwarded headers.
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class

_TOKEN: Final[str] = rf"[{_TCHAR}]+"

_QDTEXT: Final[str] = r"[{}]".format(
    r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)

_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"

_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
    qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
)

# One Forwarded element: token "=" (token | quoted-string), optionally
# followed by a ":port" suffix of one to four digits.
_FORWARDED_PAIR: Final[str] = (
    r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
        token=_TOKEN, quoted_string=_QUOTED_STRING
    )
)

_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
# same pattern as _QUOTED_PAIR but contains a capture group

_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
+
+############################################################
+# HTTP Request
+############################################################
+
+
class BaseRequest(MutableMapping[str, Any], HeadersMixin):

    # HTTP methods that are expected to carry a request body.
    POST_METHODS = {
        hdrs.METH_PATCH,
        hdrs.METH_POST,
        hdrs.METH_PUT,
        hdrs.METH_TRACE,
        hdrs.METH_DELETE,
    }

    # NOTE(review): presumably the set of attribute names the DEBUG machinery
    # allows to be assigned on instances — confirm against HeadersMixin.
    # Keep in sync with the attributes assigned in __init__.
    ATTRS = HeadersMixin.ATTRS | frozenset(
        [
            "_message",
            "_protocol",
            "_payload_writer",
            "_payload",
            "_headers",
            "_method",
            "_version",
            "_rel_url",
            "_post",
            "_read_bytes",
            "_state",
            "_cache",
            "_task",
            "_client_max_size",
            "_loop",
            "_transport_sslcontext",
            "_transport_peername",
        ]
    )
    # Lazily-populated caches: parsed form data and the raw body bytes
    # (both stay None until first read; clone() refuses after a read).
    _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
    _read_bytes: Optional[bytes] = None
+
    def __init__(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: "RequestHandler",
        payload_writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
        loop: asyncio.AbstractEventLoop,
        *,
        client_max_size: int = 1024**2,
        state: Optional[Dict[str, Any]] = None,
        scheme: Optional[str] = None,
        host: Optional[str] = None,
        remote: Optional[str] = None,
    ) -> None:
        """Build a request from a parsed *message*; scheme/host/remote override
        the values otherwise derived from the message URL or transport."""
        self._message = message
        self._protocol = protocol
        self._payload_writer = payload_writer

        self._payload = payload
        self._headers: CIMultiDictProxy[str] = message.headers
        self._method = message.method
        self._version = message.version
        self._cache: Dict[str, Any] = {}
        url = message.url
        if url.absolute:
            if scheme is not None:
                url = url.with_scheme(scheme)
            if host is not None:
                url = url.with_host(host)
            # absolute URL is given,
            # override auto-calculating url, host, and scheme
            # all other properties should be good
            self._cache["url"] = url
            self._cache["host"] = url.host
            self._cache["scheme"] = url.scheme
            self._rel_url = url.relative()
        else:
            self._rel_url = url
            if scheme is not None:
                self._cache["scheme"] = scheme
            if host is not None:
                self._cache["host"] = host

        self._state = {} if state is None else state
        self._task = task
        self._client_max_size = client_max_size
        self._loop = loop

        # Snapshot transport info now; the transport may go away later.
        self._transport_sslcontext = protocol.ssl_context
        self._transport_peername = protocol.peername

        if remote is not None:
            self._cache["remote"] = remote
+
    def clone(
        self,
        *,
        method: Union[str, _SENTINEL] = sentinel,
        rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
        headers: Union[LooseHeaders, _SENTINEL] = sentinel,
        scheme: Union[str, _SENTINEL] = sentinel,
        host: Union[str, _SENTINEL] = sentinel,
        remote: Union[str, _SENTINEL] = sentinel,
        client_max_size: Union[int, _SENTINEL] = sentinel,
    ) -> "BaseRequest":
        """Clone itself with replacement some attributes.

        Creates and returns a new instance of Request object. If no parameters
        are given, an exact copy is returned. If a parameter is not passed, it
        will reuse the one from the current request object.

        Raises RuntimeError once the body has been read (the payload stream
        cannot be duplicated).
        """
        if self._read_bytes:
            raise RuntimeError("Cannot clone request after reading its content")

        dct: Dict[str, Any] = {}
        if method is not sentinel:
            dct["method"] = method
        if rel_url is not sentinel:
            new_url: URL = URL(rel_url)
            dct["url"] = new_url
            dct["path"] = str(new_url)
        if headers is not sentinel:
            # a copy semantic
            dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
            dct["raw_headers"] = tuple(
                (k.encode("utf-8"), v.encode("utf-8"))
                for k, v in dct["headers"].items()
            )

        message = self._message._replace(**dct)

        kwargs = {}
        if scheme is not sentinel:
            kwargs["scheme"] = scheme
        if host is not sentinel:
            kwargs["host"] = host
        if remote is not sentinel:
            kwargs["remote"] = remote
        if client_max_size is sentinel:
            client_max_size = self._client_max_size

        return self.__class__(
            message,
            self._payload,
            self._protocol,
            self._payload_writer,
            self._task,
            self._loop,
            client_max_size=client_max_size,
            state=self._state.copy(),
            **kwargs,
        )
+
+ @property
+ def task(self) -> "asyncio.Task[None]":
+ return self._task
+
+ @property
+ def protocol(self) -> "RequestHandler":
+ return self._protocol
+
+ @property
+ def transport(self) -> Optional[asyncio.Transport]:
+ if self._protocol is None:
+ return None
+ return self._protocol.transport
+
+ @property
+ def writer(self) -> AbstractStreamWriter:
+ return self._payload_writer
+
+ @property
+ def client_max_size(self) -> int:
+ return self._client_max_size
+
+ @reify
+ def message(self) -> RawRequestMessage:
+ warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
+ return self._message
+
+ @reify
+ def rel_url(self) -> URL:
+ return self._rel_url
+
+ @reify
+ def loop(self) -> asyncio.AbstractEventLoop:
+ warnings.warn(
+ "request.loop property is deprecated", DeprecationWarning, stacklevel=2
+ )
+ return self._loop
+
+ # MutableMapping API
+
+ def __getitem__(self, key: str) -> Any:
+ return self._state[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._state[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._state)
+
+ ########
+
+ @reify
+ def secure(self) -> bool:
+ """A bool indicating if the request is handled with SSL."""
+ return self.scheme == "https"
+
+ @reify
+ def forwarded(self) -> Tuple[Mapping[str, str], ...]:
+ """A tuple containing all parsed Forwarded header(s).
+
+ Makes an effort to parse Forwarded headers as specified by RFC 7239:
+
+ - It adds one (immutable) dictionary per Forwarded 'field-value', ie
+ per proxy. The element corresponds to the data in the Forwarded
+ field-value added by the first proxy encountered by the client. Each
+ subsequent item corresponds to those added by later proxies.
+ - It checks that every value has valid syntax in general as specified
+ in section 4: either a 'token' or a 'quoted-string'.
+ - It un-escapes found escape sequences.
+ - It does NOT validate 'by' and 'for' contents as specified in section
+ 6.
+ - It does NOT validate 'host' contents (Host ABNF).
+ - It does NOT validate 'proto' contents for valid URI scheme names.
+
+ Returns a tuple containing one or more immutable dicts
+ """
+ elems = []
+ for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
+ length = len(field_value)
+ pos = 0
+ need_separator = False
+ elem: Dict[str, str] = {}
+ elems.append(types.MappingProxyType(elem))
+ while 0 <= pos < length:
+ match = _FORWARDED_PAIR_RE.match(field_value, pos)
+ if match is not None: # got a valid forwarded-pair
+ if need_separator:
+ # bad syntax here, skip to next comma
+ pos = field_value.find(",", pos)
+ else:
+ name, value, port = match.groups()
+ if value[0] == '"':
+ # quoted string: remove quotes and unescape
+ value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
+ if port:
+ value += port
+ elem[name.lower()] = value
+ pos += len(match.group(0))
+ need_separator = True
+ elif field_value[pos] == ",": # next forwarded-element
+ need_separator = False
+ elem = {}
+ elems.append(types.MappingProxyType(elem))
+ pos += 1
+ elif field_value[pos] == ";": # next forwarded-pair
+ need_separator = False
+ pos += 1
+ elif field_value[pos] in " \t":
+ # Allow whitespace even between forwarded-pairs, though
+ # RFC 7239 doesn't. This simplifies code and is in line
+ # with Postel's law.
+ pos += 1
+ else:
+ # bad syntax here, skip to next comma
+ pos = field_value.find(",", pos)
+ return tuple(elems)
+
+ @reify
+ def scheme(self) -> str:
+ """A string representing the scheme of the request.
+
+ Hostname is resolved in this order:
+
+ - overridden value by .clone(scheme=new_scheme) call.
+ - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.
+
+ 'http' or 'https'.
+ """
+ if self._transport_sslcontext:
+ return "https"
+ else:
+ return "http"
+
+ @reify
+ def method(self) -> str:
+ """Read only property for getting HTTP method.
+
+ The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
+ """
+ return self._method
+
+ @reify
+ def version(self) -> HttpVersion:
+ """Read only property for getting HTTP version of request.
+
+ Returns aiohttp.protocol.HttpVersion instance.
+ """
+ return self._version
+
+ @reify
+ def host(self) -> str:
+ """Hostname of the request.
+
+ Hostname is resolved in this order:
+
+ - overridden value by .clone(host=new_host) call.
+ - HOST HTTP header
+ - socket.getfqdn() value
+
+ For example, 'example.com' or 'localhost:8080'.
+
+ For historical reasons, the port number may be included.
+ """
+ host = self._message.headers.get(hdrs.HOST)
+ if host is not None:
+ return host
+ return socket.getfqdn()
+
+ @reify
+ def remote(self) -> Optional[str]:
+ """Remote IP of client initiated HTTP request.
+
+ The IP is resolved in this order:
+
+ - overridden value by .clone(remote=new_remote) call.
+ - peername of opened socket
+ """
+ if self._transport_peername is None:
+ return None
+ if isinstance(self._transport_peername, (list, tuple)):
+ return str(self._transport_peername[0])
+ return str(self._transport_peername)
+
+ @reify
+ def url(self) -> URL:
+ """The full URL of the request."""
+ # authority is used here because it may include the port number
+ # and we want yarl to parse it correctly
+ return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url)
+
+ @reify
+ def path(self) -> str:
+ """The URL including *PATH INFO* without the host or scheme.
+
+ E.g., ``/app/blog``
+ """
+ return self._rel_url.path
+
+ @reify
+ def path_qs(self) -> str:
+ """The URL including PATH_INFO and the query string.
+
+ E.g, /app/blog?id=10
+ """
+ return str(self._rel_url)
+
+ @reify
+ def raw_path(self) -> str:
+ """The URL including raw *PATH INFO* without the host or scheme.
+
+ Warning, the path is unquoted and may contains non valid URL characters
+
+ E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
+ """
+ return self._message.path
+
+ @reify
+ def query(self) -> "MultiMapping[str]":
+ """A multidict with all the variables in the query string."""
+ return self._rel_url.query
+
+ @reify
+ def query_string(self) -> str:
+ """The query string in the URL.
+
+ E.g., id=10
+ """
+ return self._rel_url.query_string
+
+ @reify
+ def headers(self) -> CIMultiDictProxy[str]:
+ """A case-insensitive multidict proxy with all headers."""
+ return self._headers
+
+ @reify
+ def raw_headers(self) -> RawHeaders:
+ """A sequence of pairs for all headers."""
+ return self._message.raw_headers
+
+ @reify
+ def if_modified_since(self) -> Optional[datetime.datetime]:
+ """The value of If-Modified-Since HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
+
+ @reify
+ def if_unmodified_since(self) -> Optional[datetime.datetime]:
+ """The value of If-Unmodified-Since HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
+
+ @staticmethod
+ def _etag_values(etag_header: str) -> Iterator[ETag]:
+ """Extract `ETag` objects from raw header."""
+ if etag_header == ETAG_ANY:
+ yield ETag(
+ is_weak=False,
+ value=ETAG_ANY,
+ )
+ else:
+ for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
+ is_weak, value, garbage = match.group(2, 3, 4)
+ # Any symbol captured by 4th group means
+ # that the following sequence is invalid.
+ if garbage:
+ break
+
+ yield ETag(
+ is_weak=bool(is_weak),
+ value=value,
+ )
+
+ @classmethod
+ def _if_match_or_none_impl(
+ cls, header_value: Optional[str]
+ ) -> Optional[Tuple[ETag, ...]]:
+ if not header_value:
+ return None
+
+ return tuple(cls._etag_values(header_value))
+
+ @reify
+ def if_match(self) -> Optional[Tuple[ETag, ...]]:
+ """The value of If-Match HTTP header, or None.
+
+ This header is represented as a `tuple` of `ETag` objects.
+ """
+ return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
+
+ @reify
+ def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
+ """The value of If-None-Match HTTP header, or None.
+
+ This header is represented as a `tuple` of `ETag` objects.
+ """
+ return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
+
+ @reify
+ def if_range(self) -> Optional[datetime.datetime]:
+ """The value of If-Range HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_RANGE))
+
+ @reify
+ def keep_alive(self) -> bool:
+ """Is keepalive enabled by client?"""
+ return not self._message.should_close
+
+ @reify
+ def cookies(self) -> Mapping[str, str]:
+ """Return request cookies.
+
+ A read-only dictionary-like object.
+ """
+ # Use parse_cookie_header for RFC 6265 compliant Cookie header parsing
+ # that accepts special characters in cookie names (fixes #2683)
+ parsed = parse_cookie_header(self.headers.get(hdrs.COOKIE, ""))
+ # Extract values from Morsel objects
+ return MappingProxyType({name: morsel.value for name, morsel in parsed})
+
+ @reify
+ def http_range(self) -> slice:
+ """The content of Range HTTP header.
+
+ Return a slice instance.
+
+ """
+ rng = self._headers.get(hdrs.RANGE)
+ start, end = None, None
+ if rng is not None:
+ try:
+ pattern = r"^bytes=(\d*)-(\d*)$"
+ start, end = re.findall(pattern, rng, re.ASCII)[0]
+ except IndexError: # pattern was not found in header
+ raise ValueError("range not in acceptable format")
+
+ end = int(end) if end else None
+ start = int(start) if start else None
+
+ if start is None and end is not None:
+ # end with no start is to return tail of content
+ start = -end
+ end = None
+
+ if start is not None and end is not None:
+ # end is inclusive in range header, exclusive for slice
+ end += 1
+
+ if start >= end:
+ raise ValueError("start cannot be after end")
+
+ if start is end is None: # No valid range supplied
+ raise ValueError("No start or end of range specified")
+
+ return slice(start, end, 1)
+
+ @reify
+ def content(self) -> StreamReader:
+ """Return raw payload stream."""
+ return self._payload
+
+ @property
+ def has_body(self) -> bool:
+ """Return True if request's HTTP BODY can be read, False otherwise."""
+ warnings.warn(
+ "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
+ )
+ return not self._payload.at_eof()
+
+ @property
+ def can_read_body(self) -> bool:
+ """Return True if request's HTTP BODY can be read, False otherwise."""
+ return not self._payload.at_eof()
+
+ @reify
+ def body_exists(self) -> bool:
+ """Return True if request has HTTP BODY, False otherwise."""
+ return type(self._payload) is not EmptyStreamReader
+
+ async def release(self) -> None:
+ """Release request.
+
+ Eat unread part of HTTP BODY if present.
+ """
+ while not self._payload.at_eof():
+ await self._payload.readany()
+
+ async def read(self) -> bytes:
+ """Read request body if present.
+
+ Returns bytes object with full request content.
+ """
+ if self._read_bytes is None:
+ body = bytearray()
+ while True:
+ chunk = await self._payload.readany()
+ body.extend(chunk)
+ if self._client_max_size:
+ body_size = len(body)
+ if body_size >= self._client_max_size:
+ raise HTTPRequestEntityTooLarge(
+ max_size=self._client_max_size, actual_size=body_size
+ )
+ if not chunk:
+ break
+ self._read_bytes = bytes(body)
+ return self._read_bytes
+
+ async def text(self) -> str:
+ """Return BODY as text using encoding from .charset."""
+ bytes_body = await self.read()
+ encoding = self.charset or "utf-8"
+ return bytes_body.decode(encoding)
+
+ async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
+ """Return BODY as JSON."""
+ body = await self.text()
+ return loads(body)
+
+ async def multipart(self) -> MultipartReader:
+ """Return async iterator to process BODY as multipart."""
+ return MultipartReader(self._headers, self._payload)
+
+ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
+ """Return POST parameters."""
+ if self._post is not None:
+ return self._post
+ if self._method not in self.POST_METHODS:
+ self._post = MultiDictProxy(MultiDict())
+ return self._post
+
+ content_type = self.content_type
+ if content_type not in (
+ "",
+ "application/x-www-form-urlencoded",
+ "multipart/form-data",
+ ):
+ self._post = MultiDictProxy(MultiDict())
+ return self._post
+
+ out: MultiDict[Union[str, bytes, FileField]] = MultiDict()
+
+ if content_type == "multipart/form-data":
+ multipart = await self.multipart()
+ max_size = self._client_max_size
+
+ size = 0
+ while (field := await multipart.next()) is not None:
+ field_ct = field.headers.get(hdrs.CONTENT_TYPE)
+
+ if isinstance(field, BodyPartReader):
+ if field.name is None:
+ raise ValueError("Multipart field missing name.")
+
+ # Note that according to RFC 7578, the Content-Type header
+ # is optional, even for files, so we can't assume it's
+ # present.
+ # https://tools.ietf.org/html/rfc7578#section-4.4
+ if field.filename:
+ # store file in temp file
+ tmp = await self._loop.run_in_executor(
+ None, tempfile.TemporaryFile
+ )
+ chunk = await field.read_chunk(size=2**16)
+ while chunk:
+ chunk = await field.decode(chunk)
+ await self._loop.run_in_executor(None, tmp.write, chunk)
+ size += len(chunk)
+ if 0 < max_size < size:
+ await self._loop.run_in_executor(None, tmp.close)
+ raise HTTPRequestEntityTooLarge(
+ max_size=max_size, actual_size=size
+ )
+ chunk = await field.read_chunk(size=2**16)
+ await self._loop.run_in_executor(None, tmp.seek, 0)
+
+ if field_ct is None:
+ field_ct = "application/octet-stream"
+
+ ff = FileField(
+ field.name,
+ field.filename,
+ cast(io.BufferedReader, tmp),
+ field_ct,
+ field.headers,
+ )
+ out.add(field.name, ff)
+ else:
+ # deal with ordinary data
+ value = await field.read(decode=True)
+ if field_ct is None or field_ct.startswith("text/"):
+ charset = field.get_charset(default="utf-8")
+ out.add(field.name, value.decode(charset))
+ else:
+ out.add(field.name, value)
+ size += len(value)
+ if 0 < max_size < size:
+ raise HTTPRequestEntityTooLarge(
+ max_size=max_size, actual_size=size
+ )
+ else:
+ raise ValueError(
+ "To decode nested multipart you need to use custom reader",
+ )
+ else:
+ data = await self.read()
+ if data:
+ charset = self.charset or "utf-8"
+ out.extend(
+ parse_qsl(
+ data.rstrip().decode(charset),
+ keep_blank_values=True,
+ encoding=charset,
+ )
+ )
+
+ self._post = MultiDictProxy(out)
+ return self._post
+
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
+ """Extra info from protocol transport"""
+ protocol = self._protocol
+ if protocol is None:
+ return default
+
+ transport = protocol.transport
+ if transport is None:
+ return default
+
+ return transport.get_extra_info(name, default)
+
+ def __repr__(self) -> str:
+ ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
+ "ascii"
+ )
+ return "<{} {} {} >".format(
+ self.__class__.__name__, self._method, ascii_encodable_path
+ )
+
+ def __eq__(self, other: object) -> bool:
+ return id(self) == id(other)
+
+ def __bool__(self) -> bool:
+ return True
+
+ async def _prepare_hook(self, response: StreamResponse) -> None:
+ return
+
+ def _cancel(self, exc: BaseException) -> None:
+ set_exception(self._payload, exc)
+
+ def _finish(self) -> None:
+ if self._post is None or self.content_type != "multipart/form-data":
+ return
+
+ # NOTE: Release file descriptors for the
+ # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom`
+ # NOTE: instances of files sent within multipart request body
+ # NOTE: via HTTP POST request.
+ for file_name, file_field_object in self._post.items():
+ if isinstance(file_field_object, FileField):
+ file_field_object.file.close()
+
+
+class Request(BaseRequest):
+
+ ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])
+
+ _match_info: Optional["UrlMappingMatchInfo"] = None
+
+ if DEBUG:
+
+ def __setattr__(self, name: str, val: Any) -> None:
+ if name not in self.ATTRS:
+ warnings.warn(
+ "Setting custom {}.{} attribute "
+ "is discouraged".format(self.__class__.__name__, name),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__setattr__(name, val)
+
+ def clone(
+ self,
+ *,
+ method: Union[str, _SENTINEL] = sentinel,
+ rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
+ headers: Union[LooseHeaders, _SENTINEL] = sentinel,
+ scheme: Union[str, _SENTINEL] = sentinel,
+ host: Union[str, _SENTINEL] = sentinel,
+ remote: Union[str, _SENTINEL] = sentinel,
+ client_max_size: Union[int, _SENTINEL] = sentinel,
+ ) -> "Request":
+ ret = super().clone(
+ method=method,
+ rel_url=rel_url,
+ headers=headers,
+ scheme=scheme,
+ host=host,
+ remote=remote,
+ client_max_size=client_max_size,
+ )
+ new_ret = cast(Request, ret)
+ new_ret._match_info = self._match_info
+ return new_ret
+
+ @reify
+ def match_info(self) -> "UrlMappingMatchInfo":
+ """Result of route resolving."""
+ match_info = self._match_info
+ assert match_info is not None
+ return match_info
+
+ @property
+ def app(self) -> "Application":
+ """Application instance."""
+ match_info = self._match_info
+ assert match_info is not None
+ return match_info.current_app
+
+ @property
+ def config_dict(self) -> ChainMapProxy:
+ match_info = self._match_info
+ assert match_info is not None
+ lst = match_info.apps
+ app = self.app
+ idx = lst.index(app)
+ sublist = list(reversed(lst[: idx + 1]))
+ return ChainMapProxy(sublist)
+
+ async def _prepare_hook(self, response: StreamResponse) -> None:
+ match_info = self._match_info
+ if match_info is None:
+ return
+ for app in match_info._apps:
+ if on_response_prepare := app.on_response_prepare:
+ await on_response_prepare.send(self, response)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_response.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_response.py"
new file mode 100644
index 0000000..e5f8b6c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_response.py"
@@ -0,0 +1,856 @@
+import asyncio
+import collections.abc
+import datetime
+import enum
+import json
+import math
+import time
+import warnings
+from concurrent.futures import Executor
+from http import HTTPStatus
+from http.cookies import SimpleCookie
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterator,
+ MutableMapping,
+ Optional,
+ Union,
+ cast,
+)
+
+from multidict import CIMultiDict, istr
+
+from . import hdrs, payload
+from .abc import AbstractStreamWriter
+from .compression_utils import ZLibCompressor
+from .helpers import (
+ ETAG_ANY,
+ QUOTED_ETAG_RE,
+ ETag,
+ HeadersMixin,
+ must_be_empty_body,
+ parse_http_date,
+ rfc822_formatted_time,
+ sentinel,
+ should_remove_content_length,
+ validate_etag_value,
+)
+from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
+from .payload import Payload
+from .typedefs import JSONEncoder, LooseHeaders
+
+REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
+LARGE_BODY_SIZE = 1024**2
+
+__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
+
+
+if TYPE_CHECKING:
+ from .web_request import BaseRequest
+
+ BaseClass = MutableMapping[str, Any]
+else:
+ BaseClass = collections.abc.MutableMapping
+
+
+# TODO(py311): Convert to StrEnum for wider use
+class ContentCoding(enum.Enum):
+ # The content codings that we have support for.
+ #
+ # Additional registered codings are listed at:
+ # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
+ deflate = "deflate"
+ gzip = "gzip"
+ identity = "identity"
+
+
+CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding}
+
+############################################################
+# HTTP Response classes
+############################################################
+
+
+class StreamResponse(BaseClass, HeadersMixin):
+
+ _body: Union[None, bytes, bytearray, Payload]
+ _length_check = True
+ _body = None
+ _keep_alive: Optional[bool] = None
+ _chunked: bool = False
+ _compression: bool = False
+ _compression_strategy: Optional[int] = None
+ _compression_force: Optional[ContentCoding] = None
+ _req: Optional["BaseRequest"] = None
+ _payload_writer: Optional[AbstractStreamWriter] = None
+ _eof_sent: bool = False
+ _must_be_empty_body: Optional[bool] = None
+ _body_length = 0
+ _cookies: Optional[SimpleCookie] = None
+ _send_headers_immediately = True
+
+ def __init__(
+ self,
+ *,
+ status: int = 200,
+ reason: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ _real_headers: Optional[CIMultiDict[str]] = None,
+ ) -> None:
+ """Initialize a new stream response object.
+
+ _real_headers is an internal parameter used to pass a pre-populated
+ headers object. It is used by the `Response` class to avoid copying
+ the headers when creating a new response object. It is not intended
+ to be used by external code.
+ """
+ self._state: Dict[str, Any] = {}
+
+ if _real_headers is not None:
+ self._headers = _real_headers
+ elif headers is not None:
+ self._headers: CIMultiDict[str] = CIMultiDict(headers)
+ else:
+ self._headers = CIMultiDict()
+
+ self._set_status(status, reason)
+
+ @property
+ def prepared(self) -> bool:
+ return self._eof_sent or self._payload_writer is not None
+
+ @property
+ def task(self) -> "Optional[asyncio.Task[None]]":
+ if self._req:
+ return self._req.task
+ else:
+ return None
+
+ @property
+ def status(self) -> int:
+ return self._status
+
+ @property
+ def chunked(self) -> bool:
+ return self._chunked
+
+ @property
+ def compression(self) -> bool:
+ return self._compression
+
+ @property
+ def reason(self) -> str:
+ return self._reason
+
+ def set_status(
+ self,
+ status: int,
+ reason: Optional[str] = None,
+ ) -> None:
+ assert (
+ not self.prepared
+ ), "Cannot change the response status code after the headers have been sent"
+ self._set_status(status, reason)
+
+ def _set_status(self, status: int, reason: Optional[str]) -> None:
+ self._status = int(status)
+ if reason is None:
+ reason = REASON_PHRASES.get(self._status, "")
+ elif "\n" in reason:
+ raise ValueError("Reason cannot contain \\n")
+ self._reason = reason
+
+ @property
+ def keep_alive(self) -> Optional[bool]:
+ return self._keep_alive
+
+ def force_close(self) -> None:
+ self._keep_alive = False
+
+ @property
+ def body_length(self) -> int:
+ return self._body_length
+
+ @property
+ def output_length(self) -> int:
+ warnings.warn("output_length is deprecated", DeprecationWarning)
+ assert self._payload_writer
+ return self._payload_writer.buffer_size
+
+ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
+ """Enables automatic chunked transfer encoding."""
+ if hdrs.CONTENT_LENGTH in self._headers:
+ raise RuntimeError(
+ "You can't enable chunked encoding when a content length is set"
+ )
+ if chunk_size is not None:
+ warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
+ self._chunked = True
+
+ def enable_compression(
+ self,
+ force: Optional[Union[bool, ContentCoding]] = None,
+ strategy: Optional[int] = None,
+ ) -> None:
+ """Enables response compression encoding."""
+ # Backwards compatibility for when force was a bool <0.17.
+ if isinstance(force, bool):
+ force = ContentCoding.deflate if force else ContentCoding.identity
+ warnings.warn(
+ "Using boolean for force is deprecated #3318", DeprecationWarning
+ )
+ elif force is not None:
+ assert isinstance(
+ force, ContentCoding
+ ), "force should one of None, bool or ContentEncoding"
+
+ self._compression = True
+ self._compression_force = force
+ self._compression_strategy = strategy
+
+ @property
+ def headers(self) -> "CIMultiDict[str]":
+ return self._headers
+
+ @property
+ def cookies(self) -> SimpleCookie:
+ if self._cookies is None:
+ self._cookies = SimpleCookie()
+ return self._cookies
+
+ def set_cookie(
+ self,
+ name: str,
+ value: str,
+ *,
+ expires: Optional[str] = None,
+ domain: Optional[str] = None,
+ max_age: Optional[Union[int, str]] = None,
+ path: str = "/",
+ secure: Optional[bool] = None,
+ httponly: Optional[bool] = None,
+ version: Optional[str] = None,
+ samesite: Optional[str] = None,
+ partitioned: Optional[bool] = None,
+ ) -> None:
+ """Set or update response cookie.
+
+ Sets new cookie or updates existent with new value.
+ Also updates only those params which are not None.
+ """
+ if self._cookies is None:
+ self._cookies = SimpleCookie()
+
+ self._cookies[name] = value
+ c = self._cookies[name]
+
+ if expires is not None:
+ c["expires"] = expires
+ elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
+ del c["expires"]
+
+ if domain is not None:
+ c["domain"] = domain
+
+ if max_age is not None:
+ c["max-age"] = str(max_age)
+ elif "max-age" in c:
+ del c["max-age"]
+
+ c["path"] = path
+
+ if secure is not None:
+ c["secure"] = secure
+ if httponly is not None:
+ c["httponly"] = httponly
+ if version is not None:
+ c["version"] = version
+ if samesite is not None:
+ c["samesite"] = samesite
+
+ if partitioned is not None:
+ c["partitioned"] = partitioned
+
+ def del_cookie(
+ self,
+ name: str,
+ *,
+ domain: Optional[str] = None,
+ path: str = "/",
+ secure: Optional[bool] = None,
+ httponly: Optional[bool] = None,
+ samesite: Optional[str] = None,
+ ) -> None:
+ """Delete cookie.
+
+ Creates new empty expired cookie.
+ """
+ # TODO: do we need domain/path here?
+ if self._cookies is not None:
+ self._cookies.pop(name, None)
+ self.set_cookie(
+ name,
+ "",
+ max_age=0,
+ expires="Thu, 01 Jan 1970 00:00:00 GMT",
+ domain=domain,
+ path=path,
+ secure=secure,
+ httponly=httponly,
+ samesite=samesite,
+ )
+
+ @property
+ def content_length(self) -> Optional[int]:
+ # Just a placeholder for adding setter
+ return super().content_length
+
+ @content_length.setter
+ def content_length(self, value: Optional[int]) -> None:
+ if value is not None:
+ value = int(value)
+ if self._chunked:
+ raise RuntimeError(
+ "You can't set content length when chunked encoding is enable"
+ )
+ self._headers[hdrs.CONTENT_LENGTH] = str(value)
+ else:
+ self._headers.pop(hdrs.CONTENT_LENGTH, None)
+
+ @property
+ def content_type(self) -> str:
+ # Just a placeholder for adding setter
+ return super().content_type
+
+ @content_type.setter
+ def content_type(self, value: str) -> None:
+ self.content_type # read header values if needed
+ self._content_type = str(value)
+ self._generate_content_type_header()
+
+ @property
+ def charset(self) -> Optional[str]:
+ # Just a placeholder for adding setter
+ return super().charset
+
+ @charset.setter
+ def charset(self, value: Optional[str]) -> None:
+ ctype = self.content_type # read header values if needed
+ if ctype == "application/octet-stream":
+ raise RuntimeError(
+ "Setting charset for application/octet-stream "
+ "doesn't make sense, setup content_type first"
+ )
+ assert self._content_dict is not None
+ if value is None:
+ self._content_dict.pop("charset", None)
+ else:
+ self._content_dict["charset"] = str(value).lower()
+ self._generate_content_type_header()
+
+ @property
+ def last_modified(self) -> Optional[datetime.datetime]:
+ """The value of Last-Modified HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
+
+ @last_modified.setter
+ def last_modified(
+ self, value: Optional[Union[int, float, datetime.datetime, str]]
+ ) -> None:
+ if value is None:
+ self._headers.pop(hdrs.LAST_MODIFIED, None)
+ elif isinstance(value, (int, float)):
+ self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
+ )
+ elif isinstance(value, datetime.datetime):
+ self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
+ )
+ elif isinstance(value, str):
+ self._headers[hdrs.LAST_MODIFIED] = value
+ else:
+ msg = f"Unsupported type for last_modified: {type(value).__name__}"
+ raise TypeError(msg)
+
+ @property
+ def etag(self) -> Optional[ETag]:
+ quoted_value = self._headers.get(hdrs.ETAG)
+ if not quoted_value:
+ return None
+ elif quoted_value == ETAG_ANY:
+ return ETag(value=ETAG_ANY)
+ match = QUOTED_ETAG_RE.fullmatch(quoted_value)
+ if not match:
+ return None
+ is_weak, value = match.group(1, 2)
+ return ETag(
+ is_weak=bool(is_weak),
+ value=value,
+ )
+
+ @etag.setter
+ def etag(self, value: Optional[Union[ETag, str]]) -> None:
+ if value is None:
+ self._headers.pop(hdrs.ETAG, None)
+ elif (isinstance(value, str) and value == ETAG_ANY) or (
+ isinstance(value, ETag) and value.value == ETAG_ANY
+ ):
+ self._headers[hdrs.ETAG] = ETAG_ANY
+ elif isinstance(value, str):
+ validate_etag_value(value)
+ self._headers[hdrs.ETAG] = f'"{value}"'
+ elif isinstance(value, ETag) and isinstance(value.value, str):
+ validate_etag_value(value.value)
+ hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
+ self._headers[hdrs.ETAG] = hdr_value
+ else:
+ raise ValueError(
+ f"Unsupported etag type: {type(value)}. "
+ f"etag must be str, ETag or None"
+ )
+
+ def _generate_content_type_header(
+ self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
+ ) -> None:
+ assert self._content_dict is not None
+ assert self._content_type is not None
+ params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
+ if params:
+ ctype = self._content_type + "; " + params
+ else:
+ ctype = self._content_type
+ self._headers[CONTENT_TYPE] = ctype
+
+ async def _do_start_compression(self, coding: ContentCoding) -> None:
+ if coding is ContentCoding.identity:
+ return
+ assert self._payload_writer is not None
+ self._headers[hdrs.CONTENT_ENCODING] = coding.value
+ self._payload_writer.enable_compression(
+ coding.value, self._compression_strategy
+ )
+ # Compressed payload may have different content length,
+ # remove the header
+ self._headers.popall(hdrs.CONTENT_LENGTH, None)
+
+ async def _start_compression(self, request: "BaseRequest") -> None:
+ if self._compression_force:
+ await self._do_start_compression(self._compression_force)
+ return
+ # Encoding comparisons should be case-insensitive
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
+ accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
+ for value, coding in CONTENT_CODINGS.items():
+ if value in accept_encoding:
+ await self._do_start_compression(coding)
+ return
+
+ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
+ if self._eof_sent:
+ return None
+ if self._payload_writer is not None:
+ return self._payload_writer
+ self._must_be_empty_body = must_be_empty_body(request.method, self.status)
+ return await self._start(request)
+
+ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
+ self._req = request
+ writer = self._payload_writer = request._payload_writer
+
+ await self._prepare_headers()
+ await request._prepare_hook(self)
+ await self._write_headers()
+
+ return writer
+
    async def _prepare_headers(self) -> None:
        """Finalize framing-related headers before they are written.

        Resolves keep-alive, serializes cookies, starts compression if
        requested, and decides between chunked transfer-encoding and
        Content-Length framing according to the HTTP version and whether
        the body must be empty (HEAD, 1xx/204/304).
        """
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        keep_alive = self._keep_alive
        if keep_alive is None:
            # Fall back to the client's/protocol's preference.
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        if self._cookies:
            for cookie in self._cookies.values():
                # output(header="") yields "<header>: <value>"; strip the
                # leading space left after removing the empty header name.
                value = cookie.output(header="")[1:]
                headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            # Chunked encoding only exists in HTTP/1.1.
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            if not self._must_be_empty_body:
                writer.enable_chunking()
                headers[hdrs.TRANSFER_ENCODING] = "chunked"
        elif self._length_check:  # Disabled for WebSockets
            writer.length = self.content_length
            if writer.length is None:
                # Unknown length: chunk on 1.1, otherwise close the
                # connection to delimit the body (HTTP/1.0).
                if version >= HttpVersion11:
                    if not self._must_be_empty_body:
                        writer.enable_chunking()
                        headers[hdrs.TRANSFER_ENCODING] = "chunked"
                elif not self._must_be_empty_body:
                    keep_alive = False

        # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
        # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
        if self._must_be_empty_body:
            if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
                request.method, self.status
            ):
                del headers[hdrs.CONTENT_LENGTH]
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
            if hdrs.TRANSFER_ENCODING in headers:
                del headers[hdrs.TRANSFER_ENCODING]
        elif (writer.length if self._length_check else self.content_length) != 0:
            # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
            headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
        headers.setdefault(hdrs.DATE, rfc822_formatted_time())
        headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

        # connection header
        if hdrs.CONNECTION not in headers:
            if keep_alive:
                # Keep-alive is implicit in 1.1; only 1.0 needs the header.
                if version == HttpVersion10:
                    headers[hdrs.CONNECTION] = "keep-alive"
            elif version == HttpVersion11:
                headers[hdrs.CONNECTION] = "close"
+
    async def _write_headers(self) -> None:
        """Serialize and hand the status line plus headers to the writer."""
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        # status line
        version = request.version
        status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}"
        await writer.write_headers(status_line, self._headers)
        # Send headers immediately if not opted into buffering
        if self._send_headers_immediately:
            writer.send_headers()
+
+ async def write(self, data: Union[bytes, bytearray, memoryview]) -> None:
+ assert isinstance(
+ data, (bytes, bytearray, memoryview)
+ ), "data argument must be byte-ish (%r)" % type(data)
+
+ if self._eof_sent:
+ raise RuntimeError("Cannot call write() after write_eof()")
+ if self._payload_writer is None:
+ raise RuntimeError("Cannot call write() before prepare()")
+
+ await self._payload_writer.write(data)
+
+ async def drain(self) -> None:
+ assert not self._eof_sent, "EOF has already been sent"
+ assert self._payload_writer is not None, "Response has not been started"
+ warnings.warn(
+ "drain method is deprecated, use await resp.write()",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ await self._payload_writer.drain()
+
    async def write_eof(self, data: bytes = b"") -> None:
        """Finish the response body; idempotent.

        Detaches the response from the request and the payload writer once
        the final bytes have been flushed.
        """
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            return

        assert self._payload_writer is not None, "Response has not been started"

        await self._payload_writer.write_eof(data)
        self._eof_sent = True
        self._req = None
        # Capture the final body size before dropping the writer reference.
        self._body_length = self._payload_writer.output_size
        self._payload_writer = None
+
+ def __repr__(self) -> str:
+ if self._eof_sent:
+ info = "eof"
+ elif self.prepared:
+ assert self._req is not None
+ info = f"{self._req.method} {self._req.path} "
+ else:
+ info = "not prepared"
+ return f"<{self.__class__.__name__} {self.reason} {info}>"
+
    def __getitem__(self, key: str) -> Any:
        # Per-response storage (MutableMapping protocol) backed by ``self._state``.
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    def __hash__(self) -> int:
        # Identity-based hash/equality: every response object is distinct.
        return hash(id(self))

    def __eq__(self, other: object) -> bool:
        return self is other

    def __bool__(self) -> bool:
        # Always truthy, regardless of body/state contents.
        return True
+
+
class Response(StreamResponse):
    """HTTP response whose body is fully buffered (bytes, text, or Payload)."""

    # Set by _do_start_compression() when the whole body is compressed eagerly.
    _compressed_body: Optional[bytes] = None
    # Headers may be buffered and flushed together with the body.
    _send_headers_immediately = False

    def __init__(
        self,
        *,
        body: Any = None,
        status: int = 200,
        reason: Optional[str] = None,
        text: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        content_type: Optional[str] = None,
        charset: Optional[str] = None,
        zlib_executor_size: Optional[int] = None,
        zlib_executor: Optional[Executor] = None,
    ) -> None:
        # ``body`` and ``text`` are mutually exclusive ways to supply content.
        if body is not None and text is not None:
            raise ValueError("body and text are not allowed together")

        if headers is None:
            real_headers: CIMultiDict[str] = CIMultiDict()
        else:
            real_headers = CIMultiDict(headers)

        # The charset belongs in the ``charset`` parameter, not the MIME type.
        if content_type is not None and "charset" in content_type:
            raise ValueError("charset must not be in content_type argument")

        if text is not None:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type or charset:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                # fast path for filling headers
                if not isinstance(text, str):
                    raise TypeError("text argument must be str (%r)" % type(text))
                if content_type is None:
                    content_type = "text/plain"
                if charset is None:
                    charset = "utf-8"
                real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
                # Encode eagerly so the rest of the class deals only in bytes.
                body = text.encode(charset)
                text = None
        elif hdrs.CONTENT_TYPE in real_headers:
            if content_type is not None or charset is not None:
                raise ValueError(
                    "passing both Content-Type header and "
                    "content_type or charset params "
                    "is forbidden"
                )
        elif content_type is not None:
            if charset is not None:
                content_type += "; charset=" + charset
            real_headers[hdrs.CONTENT_TYPE] = content_type

        super().__init__(status=status, reason=reason, _real_headers=real_headers)

        if text is not None:
            self.text = text
        else:
            self.body = body

        self._zlib_executor_size = zlib_executor_size
        self._zlib_executor = zlib_executor

    @property
    def body(self) -> Optional[Union[bytes, Payload]]:
        """The buffered body: raw bytes, a Payload, or None."""
        return self._body

    @body.setter
    def body(self, body: Any) -> None:
        if body is None:
            self._body = None
        elif isinstance(body, (bytes, bytearray)):
            self._body = body
        else:
            # Anything else must be adaptable to a registered Payload type.
            try:
                self._body = body = payload.PAYLOAD_REGISTRY.get(body)
            except payload.LookupError:
                raise ValueError("Unsupported body type %r" % type(body))

            headers = self._headers

            # set content-type
            if hdrs.CONTENT_TYPE not in headers:
                headers[hdrs.CONTENT_TYPE] = body.content_type

            # copy payload headers
            if body.headers:
                for key, value in body.headers.items():
                    if key not in headers:
                        headers[key] = value

        # Any previously compressed body no longer matches the new content.
        self._compressed_body = None

    @property
    def text(self) -> Optional[str]:
        """The body decoded with the response charset (utf-8 fallback)."""
        if self._body is None:
            return None
        # Note: When _body is a Payload (e.g. FilePayload), this may do blocking I/O
        # This is generally safe as most common payloads (BytesPayload, StringPayload)
        # don't do blocking I/O, but be careful with file-based payloads
        return self._body.decode(self.charset or "utf-8")

    @text.setter
    def text(self, text: str) -> None:
        assert text is None or isinstance(
            text, str
        ), "text argument must be str (%r)" % type(text)

        # Promote the generic default type to text/plain with utf-8.
        if self.content_type == "application/octet-stream":
            self.content_type = "text/plain"
        if self.charset is None:
            self.charset = "utf-8"

        self._body = text.encode(self.charset)
        self._compressed_body = None

    @property
    def content_length(self) -> Optional[int]:
        """Body length in bytes, or None when not determinable (chunked/Payload)."""
        if self._chunked:
            return None

        if hdrs.CONTENT_LENGTH in self._headers:
            return int(self._headers[hdrs.CONTENT_LENGTH])

        if self._compressed_body is not None:
            # Return length of the compressed body
            return len(self._compressed_body)
        elif isinstance(self._body, Payload):
            # A payload without content length, or a compressed payload
            return None
        elif self._body is not None:
            return len(self._body)
        else:
            return 0

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        # Derived from the body; callers must not set it directly.
        raise RuntimeError("Content length is set automatically")

    async def write_eof(self, data: bytes = b"") -> None:
        """Write the whole buffered (possibly compressed) body, then finish."""
        if self._eof_sent:
            return
        if self._compressed_body is None:
            body: Optional[Union[bytes, Payload]] = self._body
        else:
            body = self._compressed_body
        assert not data, f"data arg is not supported, got {data!r}"
        assert self._req is not None
        assert self._payload_writer is not None
        if body is None or self._must_be_empty_body:
            await super().write_eof()
        elif isinstance(self._body, Payload):
            # Stream the payload through the writer, then close its source.
            await self._body.write(self._payload_writer)
            await self._body.close()
            await super().write_eof()
        else:
            await super().write_eof(cast(bytes, body))

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        """Fix up Content-Length from the known body size before starting."""
        if hdrs.CONTENT_LENGTH in self._headers:
            if should_remove_content_length(request.method, self.status):
                del self._headers[hdrs.CONTENT_LENGTH]
        elif not self._chunked:
            if isinstance(self._body, Payload):
                if (size := self._body.size) is not None:
                    self._headers[hdrs.CONTENT_LENGTH] = str(size)
            else:
                body_len = len(self._body) if self._body else "0"
                # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
                if body_len != "0" or (
                    self.status != 304 and request.method not in hdrs.METH_HEAD_ALL
                ):
                    self._headers[hdrs.CONTENT_LENGTH] = str(body_len)

        return await super()._start(request)

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        """Compress the buffered body in one shot (stream for Payload/chunked)."""
        if self._chunked or isinstance(self._body, Payload):
            return await super()._do_start_compression(coding)
        if coding is ContentCoding.identity:
            return
        # Instead of using _payload_writer.enable_compression,
        # compress the whole body
        compressor = ZLibCompressor(
            encoding=coding.value,
            max_sync_chunk_size=self._zlib_executor_size,
            executor=self._zlib_executor,
        )
        assert self._body is not None
        if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
            warnings.warn(
                "Synchronous compression of large response bodies "
                f"({len(self._body)} bytes) might block the async event loop. "
                "Consider providing a custom value to zlib_executor_size/"
                "zlib_executor response properties or disabling compression on it."
            )
        self._compressed_body = (
            await compressor.compress(self._body) + compressor.flush()
        )
        self._headers[hdrs.CONTENT_ENCODING] = coding.value
        self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
+
+
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    """Build a Response whose body is *data* serialized via *dumps*.

    Exactly one of ``data``, ``text`` or ``body`` may carry the content.
    """
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_routedef.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_routedef.py"
new file mode 100644
index 0000000..f51b6cd
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_routedef.py"
@@ -0,0 +1,214 @@
+import abc
+import os # noqa
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Type,
+ Union,
+ overload,
+)
+
+import attr
+
+from . import hdrs
+from .abc import AbstractView
+from .typedefs import Handler, PathLike
+
+if TYPE_CHECKING:
+ from .web_request import Request
+ from .web_response import StreamResponse
+ from .web_urldispatcher import AbstractRoute, UrlDispatcher
+else:
+ Request = StreamResponse = UrlDispatcher = AbstractRoute = None
+
+
+__all__ = (
+ "AbstractRouteDef",
+ "RouteDef",
+ "StaticDef",
+ "RouteTableDef",
+ "head",
+ "options",
+ "get",
+ "post",
+ "patch",
+ "put",
+ "delete",
+ "route",
+ "view",
+ "static",
+)
+
+
class AbstractRouteDef(abc.ABC):
    """Deferred route description that can later attach itself to a router."""

    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add this definition to *router*; return the created routes."""
        pass  # pragma: no cover
+
+
+_HandlerType = Union[Type[AbstractView], Handler]
+
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    """Immutable record of one route: HTTP method, path, handler and kwargs."""

    method: str
    path: str
    handler: _HandlerType
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        return "<RouteDef {method} {path} -> {handler.__name__!r}{info}>".format(
            method=self.method, path=self.path, handler=self.handler, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        if self.method in hdrs.METH_ALL:
            # Standard verbs map onto the router's add_get()/add_post()/... helpers.
            reg = getattr(router, "add_" + self.method.lower())
            return [reg(self.path, self.handler, **self.kwargs)]
        else:
            return [
                router.add_route(self.method, self.path, self.handler, **self.kwargs)
            ]
+
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    """Immutable record of a static-files mount: URL prefix, directory, kwargs."""

    prefix: str
    path: PathLike
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        return "<StaticDef {prefix} -> {path}{info}>".format(
            prefix=self.prefix, path=self.path, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        # Expose the concrete routes the static resource created.
        routes = resource.get_info().get("routes", {})
        return list(routes.values())
+
+
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Create a RouteDef for an arbitrary HTTP method."""
    return RouteDef(method, path, handler, kwargs)


# Thin per-verb conveniences over route(); kwargs pass through unchanged.


def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_HEAD, path, handler, **kwargs)


def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)


def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    """GET route; by default also answers HEAD (``allow_head=True``)."""
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )


def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_POST, path, handler, **kwargs)


def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_PUT, path, handler, **kwargs)


def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_PATCH, path, handler, **kwargs)


def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    return route(hdrs.METH_DELETE, path, handler, **kwargs)


def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    """Route a class-based view: one handler answering any method."""
    return route(hdrs.METH_ANY, path, handler, **kwargs)


def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    """Create a StaticDef serving files under *path* at URL *prefix*."""
    return StaticDef(prefix, path, kwargs)
+
+
+_Deco = Callable[[_HandlerType], _HandlerType]
+
+
class RouteTableDef(Sequence[AbstractRouteDef]):
    """Route definition table"""

    def __init__(self) -> None:
        # Ordered collection of RouteDef/StaticDef entries.
        self._items: List[AbstractRouteDef] = []

    def __repr__(self) -> str:
        return f"<RouteTableDef count={len(self._items)}>"

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef: ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering the wrapped handler under *method*/*path*."""

        def inner(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return inner

    # Per-verb decorator conveniences over route().

    def head(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def options(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_OPTIONS, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        """Append a static-files mount (not a decorator)."""
        self._items.append(StaticDef(prefix, path, kwargs))
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_runner.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_runner.py"
new file mode 100644
index 0000000..bcfec72
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_runner.py"
@@ -0,0 +1,399 @@
+import asyncio
+import signal
+import socket
+import warnings
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Any, List, Optional, Set
+
+from yarl import URL
+
+from .typedefs import PathLike
+from .web_app import Application
+from .web_server import Server
+
+if TYPE_CHECKING:
+ from ssl import SSLContext
+else:
+ try:
+ from ssl import SSLContext
+ except ImportError: # pragma: no cover
+ SSLContext = object # type: ignore[misc,assignment]
+
+__all__ = (
+ "BaseSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ "SockSite",
+ "BaseRunner",
+ "AppRunner",
+ "ServerRunner",
+ "GracefulExit",
+)
+
+
class GracefulExit(SystemExit):
    # Raised to unwind the running application cleanly; process exit status 1.
    code = 1


def _raise_graceful_exit() -> None:
    # Installed as the SIGINT/SIGTERM handler by BaseRunner.setup().
    raise GracefulExit()
+
+
class BaseSite(ABC):
    """One listening endpoint (TCP/Unix/pipe/socket) attached to a runner."""

    __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        if shutdown_timeout != 60.0:
            # Per-site shutdown_timeout is deprecated; forward it to the runner.
            msg = "shutdown_timeout should be set on BaseRunner"
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            runner._shutdown_timeout = shutdown_timeout
        self._runner = runner
        self._ssl_context = ssl_context
        self._backlog = backlog
        self._server: Optional[asyncio.AbstractServer] = None

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable (URL-like) name of the endpoint."""
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        # Subclasses call this first to register with the runner,
        # then create the actual asyncio server.
        self._runner._reg_site(self)

    async def stop(self) -> None:
        """Stop accepting new connections and unregister from the runner."""
        self._runner._check_site(self)
        if self._server is not None:  # Maybe not started yet
            self._server.close()

        self._runner._unreg_site(self)
+
+
class TCPSite(BaseSite):
    """TCP endpoint; defaults to port 8443 with TLS, 8080 without."""

    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        if port is None:
            port = 8443 if self._ssl_context else 8080
        self._port = port
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        scheme = "https" if self._ssl_context else "http"
        # No explicit host means listening on all interfaces.
        host = "0.0.0.0" if not self._host else self._host
        return str(URL.build(scheme=scheme, host=host, port=self._port))

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )
+
+
class UnixSite(BaseSite):
    """Unix domain socket endpoint at a filesystem path."""

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: PathLike,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        scheme = "https" if self._ssl_context else "http"
        return f"{scheme}://unix:{self._path}:"

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_unix_server(
            server,
            self._path,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )
+
+
class NamedPipeSite(BaseSite):
    """Windows named-pipe endpoint; requires the proactor event loop."""

    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        loop = asyncio.get_event_loop()
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        return self._path

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        # start_serving_pipe returns a list; keep the single server it holds.
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]
+
+
class SockSite(BaseSite):
    """Endpoint bound to an already-created socket object."""

    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        # Derive a display name from the socket's family and bound address.
        scheme = "https" if self._ssl_context else "http"
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            host, port = sock.getsockname()[:2]
            name = str(URL.build(scheme=scheme, host=host, port=port))
        self._name = name

    @property
    def name(self) -> str:
        return self._name

    async def start(self) -> None:
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
        )
+
+
class BaseRunner(ABC):
    """Owns a Server instance and the set of sites listening for it."""

    __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout")

    def __init__(
        self,
        *,
        handle_signals: bool = False,
        shutdown_timeout: float = 60.0,
        **kwargs: Any,
    ) -> None:
        self._handle_signals = handle_signals
        # Extra kwargs are forwarded to _make_server() by subclasses.
        self._kwargs = kwargs
        self._server: Optional[Server] = None
        self._sites: List[BaseSite] = []
        self._shutdown_timeout = shutdown_timeout

    @property
    def server(self) -> Optional[Server]:
        """The Server created by setup(), or None before setup/after cleanup."""
        return self._server

    @property
    def addresses(self) -> List[Any]:
        """Bound socket addresses of all started sites."""
        ret: List[Any] = []
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets  # type: ignore[attr-defined]
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        return set(self._sites)

    async def setup(self) -> None:
        """Install signal handlers (optional) and create the Server."""
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        """Call any shutdown hooks to help server close gracefully."""

    async def cleanup(self) -> None:
        """Stop all sites, shut the server down, and undo setup()."""
        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guaranties that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()

        if self._server:  # If setup succeeded
            # Yield to event loop to ensure incoming requests prior to stopping the sites
            # have all started to be handled before we proceed to close idle connections.
            await asyncio.sleep(0)
            self._server.pre_shutdown()
            await self.shutdown()
            await self._server.shutdown(self._shutdown_timeout)
        await self._cleanup_server()

        self._server = None
        if self._handle_signals:
            loop = asyncio.get_running_loop()
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        """Create and return the Server; called once from setup()."""
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        """Release resources created by _make_server()."""
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)
+
+
class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        """A bare Server has no application-level shutdown hooks."""

    async def _make_server(self) -> Server:
        # The caller supplied the Server; nothing to construct.
        return self._web_server

    async def _cleanup_server(self) -> None:
        """Nothing to clean up; the caller owns the Server's lifetime."""
+
+
class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        return self._app

    async def shutdown(self) -> None:
        # Run the application's on_shutdown handlers.
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        """Run app startup and freeze it, then build its request handler."""
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        self._app.on_startup.freeze()
        await self._app.startup()
        # Freeze after startup so startup handlers may still mutate the app.
        self._app.freeze()

        return self._app._make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        await self._app.cleanup()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_server.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_server.py"
new file mode 100644
index 0000000..328aca1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_server.py"
@@ -0,0 +1,84 @@
+"""Low level HTTP server."""
+
+import asyncio
+from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa
+
+from .abc import AbstractStreamWriter
+from .http_parser import RawRequestMessage
+from .streams import StreamReader
+from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
+from .web_request import BaseRequest
+
+__all__ = ("Server",)
+
+
class Server:
    """Protocol factory tracking live connections for a request handler."""

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        request_factory: Optional[_RequestFactory] = None,
        handler_cancellation: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> None:
        self._loop = loop or asyncio.get_running_loop()
        # Maps each live protocol instance to its transport.
        self._connections: Dict[RequestHandler, asyncio.Transport] = {}
        # Extra kwargs are forwarded to RequestHandler in __call__().
        self._kwargs = kwargs
        # requests_count is the number of requests being processed by the server
        # for the lifetime of the server.
        self.requests_count = 0
        self.request_handler = handler
        self.request_factory = request_factory or self._make_request
        self.handler_cancellation = handler_cancellation

    @property
    def connections(self) -> List[RequestHandler]:
        return list(self._connections.keys())

    def connection_made(
        self, handler: RequestHandler, transport: asyncio.Transport
    ) -> None:
        self._connections[handler] = transport

    def connection_lost(
        self, handler: RequestHandler, exc: Optional[BaseException] = None
    ) -> None:
        if handler in self._connections:
            if handler._task_handler:
                # Defer removal until the in-flight handler task finishes.
                handler._task_handler.add_done_callback(
                    lambda f: self._connections.pop(handler, None)
                )
            else:
                del self._connections[handler]

    def _make_request(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: RequestHandler,
        writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
    ) -> BaseRequest:
        # Default request factory when none is supplied to __init__.
        return BaseRequest(message, payload, protocol, writer, task, self._loop)

    def pre_shutdown(self) -> None:
        """Close every live connection without waiting for handlers."""
        for conn in self._connections:
            conn.close()

    async def shutdown(self, timeout: Optional[float] = None) -> None:
        """Gracefully shut down all connections within *timeout* seconds."""
        coros = (conn.shutdown(timeout) for conn in self._connections)
        await asyncio.gather(*coros)
        self._connections.clear()

    def __call__(self) -> RequestHandler:
        """Protocol factory used by loop.create_server()."""
        try:
            return RequestHandler(self, loop=self._loop, **self._kwargs)
        except TypeError:
            # Failsafe creation: remove all custom handler_args
            kwargs = {
                k: v
                for k, v in self._kwargs.items()
                if k in ["debug", "access_log_class"]
            }
            return RequestHandler(self, loop=self._loop, **kwargs)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_urldispatcher.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_urldispatcher.py"
new file mode 100644
index 0000000..cfa57a3
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_urldispatcher.py"
@@ -0,0 +1,1305 @@
+import abc
+import asyncio
+import base64
+import functools
+import hashlib
+import html
+import inspect
+import keyword
+import os
+import platform
+import re
+import sys
+import warnings
+from functools import wraps
+from pathlib import Path
+from types import MappingProxyType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Container,
+ Dict,
+ Final,
+ Generator,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ NoReturn,
+ Optional,
+ Pattern,
+ Set,
+ Sized,
+ Tuple,
+ Type,
+ TypedDict,
+ Union,
+ cast,
+)
+
+from yarl import URL, __version__ as yarl_version
+
+from . import hdrs
+from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
+from .helpers import DEBUG
+from .http import HttpVersion11
+from .typedefs import Handler, PathLike
+from .web_exceptions import (
+ HTTPException,
+ HTTPExpectationFailed,
+ HTTPForbidden,
+ HTTPMethodNotAllowed,
+ HTTPNotFound,
+)
+from .web_fileresponse import FileResponse
+from .web_request import Request
+from .web_response import Response, StreamResponse
+from .web_routedef import AbstractRouteDef
+
__all__ = (
    "UrlDispatcher",
    "UrlMappingMatchInfo",
    "AbstractResource",
    "Resource",
    "PlainResource",
    "DynamicResource",
    "AbstractRoute",
    "ResourceRoute",
    "StaticResource",
    "View",
)


if TYPE_CHECKING:
    from .web_app import Application

    BaseDict = Dict[str, str]
else:
    # At runtime UrlMappingMatchInfo simply subclasses plain dict.
    BaseDict = dict

# Exception types Path.resolve() may raise on a circular symlink; varies by
# Python version/platform, empty on 3.13+ where resolve() no longer raises.
CIRCULAR_SYMLINK_ERROR = (
    (OSError,)
    if sys.version_info < (3, 10) and sys.platform.startswith("win32")
    else (RuntimeError,) if sys.version_info < (3, 13) else ()
)

# (major, minor) of the installed yarl, used for a compatibility branch below.
YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))

# Token-style characters permitted in an HTTP method name.
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
    r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
)
# Splits a route path on {variable} placeholders (one nesting level allowed).
ROUTE_RE: Final[Pattern[str]] = re.compile(
    r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
)
PATH_SEP: Final[str] = re.escape("/")

IS_WINDOWS: Final[bool] = platform.system() == "Windows"

_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]]
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]

# html.escape with quote=True, used when rendering directory index pages.
html_escape = functools.partial(html.escape, quote=True)
+
+
class _InfoDict(TypedDict, total=False):
    """Introspection payload of get_info(); which keys appear depends on the
    concrete resource/route type (total=False makes every key optional)."""

    path: str

    formatter: str
    pattern: Pattern[str]

    directory: Path
    prefix: str
    routes: Mapping[str, "AbstractRoute"]

    app: "Application"

    domain: str

    rule: "AbstractRuleMatching"

    http_exception: HTTPException
+
+
class AbstractResource(Sized, Iterable["AbstractRoute"]):
    """Base interface for a routable resource: an (optionally named) entity
    that can resolve requests, build URLs, and enumerate its routes."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        self._name = name

    @property
    def name(self) -> Optional[str]:
        return self._name

    @property
    @abc.abstractmethod
    def canonical(self) -> str:
        """Exposes the resource's canonical path.

        For example '/foo/bar/{name}'

        """

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, **kwargs: str) -> URL:
        """Construct url for resource with additional params."""

    @abc.abstractmethod  # pragma: no branch
    async def resolve(self, request: Request) -> _Resolve:
        """Resolve resource.

        Return (UrlMappingMatchInfo, allowed_methods) pair.
        """

    @abc.abstractmethod
    def add_prefix(self, prefix: str) -> None:
        """Add a prefix to processed URLs.

        Required for subapplications support.
        """

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    def freeze(self) -> None:
        # Default no-op; subclasses may finalize state here.
        pass

    @abc.abstractmethod
    def raw_match(self, path: str) -> bool:
        """Perform a raw match against path"""
+
+
class AbstractRoute(abc.ABC):
    """Binds an HTTP method to a handler, optionally owned by a resource."""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
        resource: Optional[AbstractResource] = None,
    ) -> None:

        if expect_handler is None:
            expect_handler = _default_expect_handler

        # asyncio.iscoroutinefunction is only consulted on Python < 3.14;
        # inspect.iscoroutinefunction covers newer versions.
        assert inspect.iscoroutinefunction(expect_handler) or (
            sys.version_info < (3, 14) and asyncio.iscoroutinefunction(expect_handler)
        ), f"Coroutine is expected, got {expect_handler!r}"

        method = method.upper()
        if not HTTP_METHOD_RE.match(method):
            raise ValueError(f"{method} is not allowed HTTP method")

        assert callable(handler), handler
        if inspect.iscoroutinefunction(handler) or (
            sys.version_info < (3, 14) and asyncio.iscoroutinefunction(handler)
        ):
            # Proper coroutine handler: nothing to do.
            pass
        elif inspect.isgeneratorfunction(handler):
            if TYPE_CHECKING:
                assert False
            warnings.warn(
                "Bare generators are deprecated, use @coroutine wrapper",
                DeprecationWarning,
            )
        elif isinstance(handler, type) and issubclass(handler, AbstractView):
            # Class-based views are awaited via __await__; no wrapping needed.
            pass
        else:
            warnings.warn(
                "Bare functions are deprecated, use async ones", DeprecationWarning
            )

            # Wrap the plain callable so it can be awaited like a coroutine
            # handler; its result must already be a StreamResponse.
            @wraps(handler)
            async def handler_wrapper(request: Request) -> StreamResponse:
                result = old_handler(request)  # type: ignore[call-arg]
                if asyncio.iscoroutine(result):
                    result = await result
                assert isinstance(result, StreamResponse)
                return result

            old_handler = handler
            handler = handler_wrapper

        self._method = method
        self._handler = handler
        self._expect_handler = expect_handler
        self._resource = resource

    @property
    def method(self) -> str:
        # Always upper-cased (normalized in __init__).
        return self._method

    @property
    def handler(self) -> Handler:
        return self._handler

    @property
    @abc.abstractmethod
    def name(self) -> Optional[str]:
        """Optional route's name, always equals to resource's name."""

    @property
    def resource(self) -> Optional[AbstractResource]:
        return self._resource

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""

    async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]:
        """Delegate Expect-header processing to the configured handler."""
        return await self._expect_handler(request)
+
+
class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
    """Dict of matched path variables plus the route and application stack
    associated with a resolved request."""

    __slots__ = ("_route", "_apps", "_current_app", "_frozen")

    def __init__(self, match_dict: Dict[str, str], route: AbstractRoute) -> None:
        super().__init__(match_dict)
        self._route = route
        self._apps: List[Application] = []
        self._current_app: Optional[Application] = None
        self._frozen = False

    @property
    def handler(self) -> Handler:
        return self._route.handler

    @property
    def route(self) -> AbstractRoute:
        return self._route

    @property
    def expect_handler(self) -> _ExpectHandler:
        return self._route.handle_expect_header

    @property
    def http_exception(self) -> Optional[HTTPException]:
        # A successful match carries no HTTP error (cf. MatchInfoError).
        return None

    def get_info(self) -> _InfoDict:  # type: ignore[override]
        return self._route.get_info()

    @property
    def apps(self) -> Tuple["Application", ...]:
        return tuple(self._apps)

    def add_app(self, app: "Application") -> None:
        """Prepend *app* to the application stack; the first app added also
        becomes the current app."""
        if self._frozen:
            raise RuntimeError("Cannot change apps stack after .freeze() call")
        if self._current_app is None:
            self._current_app = app
        self._apps.insert(0, app)

    @property
    def current_app(self) -> "Application":
        app = self._current_app
        assert app is not None
        return app

    @current_app.setter
    def current_app(self, app: "Application") -> None:
        if DEBUG:  # pragma: no cover
            # Debug-only sanity check: only apps on the stack may be current.
            if app not in self._apps:
                raise RuntimeError(
                    "Expected one of the following apps {!r}, got {!r}".format(
                        self._apps, app
                    )
                )
        self._current_app = app

    def freeze(self) -> None:
        self._frozen = True

    def __repr__(self) -> str:
        return f"<MatchInfo {super().__repr__()}: {self._route}>"
+
+
class MatchInfoError(UrlMappingMatchInfo):
    """Match-info placeholder carrying the HTTP error raised during routing."""

    __slots__ = ("_exception",)

    def __init__(self, http_exception: HTTPException) -> None:
        self._exception = http_exception
        super().__init__({}, SystemRoute(self._exception))

    @property
    def http_exception(self) -> HTTPException:
        """The HTTP error this match info represents."""
        return self._exception

    def __repr__(self) -> str:
        exc = self._exception
        return f"<MatchInfoError {exc.status}: {exc.reason}>"
+
+
async def _default_expect_handler(request: Request) -> None:
    """Default handler for the Expect header.

    On HTTP/1.1, answers "100 Continue" for a "100-continue" expectation and
    raises HTTPExpectationFailed for anything else; other HTTP versions are
    ignored.
    """
    if request.version != HttpVersion11:
        return
    expect = request.headers.get(hdrs.EXPECT, "")
    if expect.lower() != "100-continue":
        raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
    await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
    # Reset output_size as we haven't started the main body yet.
    request.writer.output_size = 0
+
+
class Resource(AbstractResource):
    """Base resource holding a method -> route table plus an optional
    ANY-method fallback route."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        self._routes: Dict[str, ResourceRoute] = {}
        # Route registered under METH_ANY; acts as fallback for every method.
        self._any_route: Optional[ResourceRoute] = None
        self._allowed_methods: Set[str] = set()

    def add_route(
        self,
        method: str,
        handler: Union[Type[AbstractView], Handler],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> "ResourceRoute":
        # Reject duplicates: an existing route for this method (or a prior
        # ANY-method route) would always shadow the new one.
        if route := self._routes.get(method, self._any_route):
            raise RuntimeError(
                "Added route will never be executed, "
                f"method {route.method} is already "
                "registered"
            )

        route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
        self.register_route(route_obj)
        return route_obj

    def register_route(self, route: "ResourceRoute") -> None:
        assert isinstance(
            route, ResourceRoute
        ), f"Instance of Route class is required, got {route!r}"
        if route.method == hdrs.METH_ANY:
            self._any_route = route
        self._allowed_methods.add(route.method)
        self._routes[route.method] = route

    async def resolve(self, request: Request) -> _Resolve:
        # First match the path, then look up the method (falling back to ANY).
        if (match_dict := self._match(request.rel_url.path_safe)) is None:
            return None, set()
        if route := self._routes.get(request.method, self._any_route):
            return UrlMappingMatchInfo(match_dict, route), self._allowed_methods
        # Path matched but method not allowed: report the allowed set.
        return None, self._allowed_methods

    @abc.abstractmethod
    def _match(self, path: str) -> Optional[Dict[str, str]]:
        pass  # pragma: no cover

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator["ResourceRoute"]:
        return iter(self._routes.values())

    # TODO: implement all abstract methods
+
+
class PlainResource(Resource):
    """Resource matching a single fixed path via exact string comparison."""

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        assert not path or path.startswith("/")
        self._path = path

    @property
    def canonical(self) -> str:
        return self._path

    def freeze(self) -> None:
        # An empty path becomes the root path once the resource is frozen.
        if not self._path:
            self._path = "/"

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._path = prefix + self._path

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        # string comparison is about 10 times faster than regexp matching;
        # there are no variable parts, so a hit yields an empty match dict.
        return {} if path == self._path else None

    def raw_match(self, path: str) -> bool:
        return path == self._path

    def get_info(self) -> _InfoDict:
        return {"path": self._path}

    def url_for(self) -> URL:  # type: ignore[override]
        return URL.build(path=self._path, encoded=True)

    def __repr__(self) -> str:
        label = f"'{self.name}' " if self.name is not None else ""
        return f"<PlainResource {label} {self._path}>"
+
+
class DynamicResource(Resource):
    """Resource whose path contains {var} or {var:regex} placeholders."""

    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
    # Default pattern for a bare {var}: one path segment, no '/' or braces.
    GOOD = r"[^{}/]+"

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        self._orig_path = path
        # Build in parallel: a regex for matching and a str.format template
        # for url_for().
        pattern = ""
        formatter = ""
        for part in ROUTE_RE.split(path):
            match = self.DYN.fullmatch(part)
            if match:
                pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
                formatter += "{" + match.group("var") + "}"
                continue

            match = self.DYN_WITH_RE.fullmatch(part)
            if match:
                pattern += "(?P<{var}>{re})".format(**match.groupdict())
                formatter += "{" + match.group("var") + "}"
                continue

            if "{" in part or "}" in part:
                # Braces that are not a well-formed placeholder.
                raise ValueError(f"Invalid path '{path}'['{part}']")

            part = _requote_path(part)
            formatter += part
            pattern += re.escape(part)

        try:
            compiled = re.compile(pattern)
        except re.error as exc:
            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
        assert compiled.pattern.startswith(PATH_SEP)
        assert formatter.startswith("/")
        self._pattern = compiled
        self._formatter = formatter

    @property
    def canonical(self) -> str:
        return self._formatter

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
        self._formatter = prefix + self._formatter

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        match = self._pattern.fullmatch(path)
        if match is None:
            return None
        # Unquote each captured variable before handing it to the handler.
        return {
            key: _unquote_path_safe(value) for key, value in match.groupdict().items()
        }

    def raw_match(self, path: str) -> bool:
        # Compares against the original, un-compiled path spec.
        return self._orig_path == path

    def get_info(self) -> _InfoDict:
        return {"formatter": self._formatter, "pattern": self._pattern}

    def url_for(self, **parts: str) -> URL:
        url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
        return URL.build(path=url, encoded=True)

    def __repr__(self) -> str:
        name = "'" + self.name + "' " if self.name is not None else ""
        return "<DynamicResource {name} {formatter}>".format(
            name=name, formatter=self._formatter
        )
+
+
class PrefixResource(AbstractResource):
    """Base for resources that match everything under a URL prefix."""

    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
        assert not prefix or prefix.startswith("/"), prefix
        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
        super().__init__(name=name)
        self._prefix = _requote_path(prefix)
        # Prefix with trailing slash, used to test "path lies under prefix".
        self._prefix2 = self._prefix + "/"

    @property
    def canonical(self) -> str:
        return self._prefix

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._prefix = prefix + self._prefix
        self._prefix2 = self._prefix + "/"

    def raw_match(self, prefix: str) -> bool:
        # Prefix resources never participate in raw (exact-spec) matching.
        return False

    # TODO: impl missing abstract methods
+
+
class StaticResource(PrefixResource):
    """Resource serving files from a directory tree under a URL prefix."""

    # Query-string key used for cache-busting file version hashes.
    VERSION_KEY = "v"

    def __init__(
        self,
        prefix: str,
        directory: PathLike,
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
        chunk_size: int = 256 * 1024,
        show_index: bool = False,
        follow_symlinks: bool = False,
        append_version: bool = False,
    ) -> None:
        super().__init__(prefix, name=name)
        try:
            directory = Path(directory).expanduser().resolve(strict=True)
        except FileNotFoundError as error:
            raise ValueError(f"'{directory}' does not exist") from error
        if not directory.is_dir():
            raise ValueError(f"'{directory}' is not a directory")
        self._directory = directory
        self._show_index = show_index
        self._chunk_size = chunk_size
        self._follow_symlinks = follow_symlinks
        self._expect_handler = expect_handler
        self._append_version = append_version

        # Static files respond to GET/HEAD only (plus an optional OPTIONS
        # route added via set_options_route()).
        self._routes = {
            "GET": ResourceRoute(
                "GET", self._handle, self, expect_handler=expect_handler
            ),
            "HEAD": ResourceRoute(
                "HEAD", self._handle, self, expect_handler=expect_handler
            ),
        }
        self._allowed_methods = set(self._routes)

    def url_for(  # type: ignore[override]
        self,
        *,
        filename: PathLike,
        append_version: Optional[bool] = None,
    ) -> URL:
        """Build a URL for *filename*, optionally appending a content hash
        as a ?v= query parameter for cache busting."""
        if append_version is None:
            append_version = self._append_version
        filename = str(filename).lstrip("/")

        url = URL.build(path=self._prefix, encoded=True)
        # filename is not encoded
        if YARL_VERSION < (1, 6):
            # Older yarl needs '%' pre-escaped before path joining.
            url = url / filename.replace("%", "%25")
        else:
            url = url / filename

        if append_version:
            unresolved_path = self._directory.joinpath(filename)
            try:
                if self._follow_symlinks:
                    normalized_path = Path(os.path.normpath(unresolved_path))
                    normalized_path.relative_to(self._directory)
                    filepath = normalized_path.resolve()
                else:
                    filepath = unresolved_path.resolve()
                    filepath.relative_to(self._directory)
            except (ValueError, FileNotFoundError):
                # ValueError for case when path point to symlink
                # with follow_symlinks is False
                return url  # relatively safe
            if filepath.is_file():
                # TODO cache file content
                # with file watcher for cache invalidation
                with filepath.open("rb") as f:
                    file_bytes = f.read()
                h = self._get_file_hash(file_bytes)
                url = url.with_query({self.VERSION_KEY: h})
                return url
        return url

    @staticmethod
    def _get_file_hash(byte_array: bytes) -> str:
        # URL-safe base64 of the SHA-256 digest of the file contents.
        m = hashlib.sha256()  # todo sha256 can be configurable param
        m.update(byte_array)
        b64 = base64.urlsafe_b64encode(m.digest())
        return b64.decode("ascii")

    def get_info(self) -> _InfoDict:
        return {
            "directory": self._directory,
            "prefix": self._prefix,
            "routes": self._routes,
        }

    def set_options_route(self, handler: Handler) -> None:
        """Register a custom OPTIONS handler (at most once)."""
        if "OPTIONS" in self._routes:
            raise RuntimeError("OPTIONS route was set already")
        self._routes["OPTIONS"] = ResourceRoute(
            "OPTIONS", handler, self, expect_handler=self._expect_handler
        )
        self._allowed_methods.add("OPTIONS")

    async def resolve(self, request: Request) -> _Resolve:
        path = request.rel_url.path_safe
        method = request.method
        # We normalise here to avoid matches that traverse below the static root.
        # e.g. /static/../../../../home/user/webapp/static/
        norm_path = os.path.normpath(path)
        if IS_WINDOWS:
            # normpath uses backslashes on Windows; restore URL separators.
            norm_path = norm_path.replace("\\", "/")
        if not norm_path.startswith(self._prefix2) and norm_path != self._prefix:
            return None, set()

        allowed_methods = self._allowed_methods
        if method not in allowed_methods:
            return None, allowed_methods

        # Strip the prefix and its trailing slash to get the relative filename.
        match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])}
        return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes.values())

    async def _handle(self, request: Request) -> StreamResponse:
        filename = request.match_info["filename"]
        unresolved_path = self._directory.joinpath(filename)
        loop = asyncio.get_running_loop()
        # Filesystem work happens in the default executor to avoid blocking
        # the event loop.
        return await loop.run_in_executor(
            None, self._resolve_path_to_response, unresolved_path
        )

    def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse:
        """Take the unresolved path and query the file system to form a response."""
        # Check for access outside the root directory. For follow symlinks, URI
        # cannot traverse out, but symlinks can. Otherwise, no access outside
        # root is permitted.
        try:
            if self._follow_symlinks:
                normalized_path = Path(os.path.normpath(unresolved_path))
                normalized_path.relative_to(self._directory)
                file_path = normalized_path.resolve()
            else:
                file_path = unresolved_path.resolve()
                file_path.relative_to(self._directory)
        except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error:
            # ValueError is raised for the relative check. Circular symlinks
            # raise here on resolving for python < 3.13.
            raise HTTPNotFound() from error

        # if path is a directory, return the contents if permitted. Note the
        # directory check will raise if a segment is not readable.
        try:
            if file_path.is_dir():
                if self._show_index:
                    return Response(
                        text=self._directory_as_html(file_path),
                        content_type="text/html",
                    )
                else:
                    raise HTTPForbidden()
        except PermissionError as error:
            raise HTTPForbidden() from error

        # Return the file response, which handles all other checks.
        return FileResponse(file_path, chunk_size=self._chunk_size)

    def _directory_as_html(self, dir_path: Path) -> str:
        """returns directory's index as html."""
        assert dir_path.is_dir()

        relative_path_to_dir = dir_path.relative_to(self._directory).as_posix()
        index_of = f"Index of /{html_escape(relative_path_to_dir)}"
        h1 = f"<h1>{index_of}</h1>"

        index_list = []
        dir_index = dir_path.iterdir()
        for _file in sorted(dir_index):
            # show file url as relative to static path
            rel_path = _file.relative_to(self._directory).as_posix()
            quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}")

            # if file is a directory, add '/' to the end of the name
            if _file.is_dir():
                file_name = f"{_file.name}/"
            else:
                file_name = _file.name

            index_list.append(
                f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>'
            )
        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
        body = f"<body>\n{h1}\n{ul}\n</body>"

        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
        # NOTE: this local name shadows the stdlib ``html`` module import
        # within this function (the module itself is not used below).
        html = f"<html>\n{head_str}\n{body}\n</html>"

        return html

    def __repr__(self) -> str:
        name = "'" + self.name + "'" if self.name is not None else ""
        return "<StaticResource {name} {path} -> {directory!r}>".format(
            name=name, path=self._prefix, directory=self._directory
        )
+
+
class PrefixedSubAppResource(PrefixResource):
    """Mounts a sub-application under a URL prefix, pushing the prefix down
    into every resource of the sub-app's router."""

    def __init__(self, prefix: str, app: "Application") -> None:
        super().__init__(prefix)
        self._app = app
        self._add_prefix_to_resources(prefix)

    def add_prefix(self, prefix: str) -> None:
        super().add_prefix(prefix)
        self._add_prefix_to_resources(prefix)

    def _add_prefix_to_resources(self, prefix: str) -> None:
        router = self._app.router
        for resource in router.resources():
            # Since the canonical path of a resource is about
            # to change, we need to unindex it and then reindex
            router.unindex_resource(resource)
            resource.add_prefix(prefix)
            router.index_resource(resource)

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not supported by sub-application root")

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "prefix": self._prefix}

    async def resolve(self, request: Request) -> _Resolve:
        # Delegate resolution to the sub-app's router, then record the app.
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
            methods = match_info.http_exception.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __len__(self) -> int:
        return len(self._app.router.routes())

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._app.router.routes())

    def __repr__(self) -> str:
        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
            prefix=self._prefix, app=self._app
        )
+
+
class AbstractRuleMatching(abc.ABC):
    """Interface for request-matching rules used by MatchedSubAppResource."""

    @abc.abstractmethod  # pragma: no branch
    async def match(self, request: Request) -> bool:
        """Return bool if the request satisfies the criteria"""

    @abc.abstractmethod  # pragma: no branch
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @property
    @abc.abstractmethod  # pragma: no branch
    def canonical(self) -> str:
        """Return a str"""
+
+
class Domain(AbstractRuleMatching):
    """Rule matching the request's Host header against an exact domain."""

    # One DNS label: 1-63 chars of [a-z0-9-], not starting/ending with '-'.
    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__()
        self._domain = self.validation(domain)

    @property
    def canonical(self) -> str:
        return self._domain

    def validation(self, domain: str) -> str:
        """Normalize and validate *domain*, returning host[:port] form.

        Raises TypeError for non-str input; ValueError for empty,
        scheme-qualified, or malformed domains.
        """
        if not isinstance(domain, str):
            raise TypeError("Domain must be str")
        domain = domain.rstrip(".").lower()
        if not domain:
            raise ValueError("Domain cannot be empty")
        elif "://" in domain:
            raise ValueError("Scheme not supported")
        url = URL("http://" + domain)
        assert url.raw_host is not None
        if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
            raise ValueError("Domain not valid")
        if url.port == 80:
            # The default HTTP port is omitted from the canonical form.
            return url.raw_host
        return f"{url.raw_host}:{url.port}"

    async def match(self, request: Request) -> bool:
        host = request.headers.get(hdrs.HOST)
        if not host:
            return False
        return self.match_domain(host)

    def match_domain(self, host: str) -> bool:
        return host.lower() == self._domain

    def get_info(self) -> _InfoDict:
        return {"domain": self._domain}
+
+
class MaskDomain(Domain):
    """Domain rule supporting '*' wildcards, matched via a compiled regex."""

    # Same as Domain.re_part but additionally allows the '*' wildcard char.
    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__(domain)
        # Escape literal dots, then turn each '*' into a regex wildcard.
        pattern_text = self._domain.replace(".", r"\.").replace("*", ".*")
        self._mask = re.compile(pattern_text)

    @property
    def canonical(self) -> str:
        return self._mask.pattern

    def match_domain(self, host: str) -> bool:
        # Full-string match against the wildcard pattern.
        return self._mask.fullmatch(host) is not None
+
+
class MatchedSubAppResource(PrefixedSubAppResource):
    """Sub-app resource selected by a matching rule (e.g. domain) rather
    than a path prefix."""

    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
        # Bypass PrefixedSubAppResource.__init__: there is no path prefix to
        # push down into the sub-app's resources.
        AbstractResource.__init__(self)
        self._prefix = ""
        self._app = app
        self._rule = rule

    @property
    def canonical(self) -> str:
        return self._rule.canonical

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "rule": self._rule}

    async def resolve(self, request: Request) -> _Resolve:
        # Only consult the sub-app's router when the rule matches.
        if not await self._rule.match(request):
            return None, set()
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
            methods = match_info.http_exception.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __repr__(self) -> str:
        return f"<MatchedSubAppResource -> {self._app!r}>"
+
+
class ResourceRoute(AbstractRoute):
    """A route with resource"""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        resource: AbstractResource,
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> None:
        super().__init__(
            method, handler, expect_handler=expect_handler, resource=resource
        )

    def __repr__(self) -> str:
        # Fix: the repr previously lacked its closing ">" bracket.
        return "<ResourceRoute [{method}] {resource} -> {handler!r}>".format(
            method=self.method, resource=self._resource, handler=self.handler
        )

    @property
    def name(self) -> Optional[str]:
        """Route name; mirrors the owning resource's name (None if unnamed)."""
        if self._resource is None:
            return None
        return self._resource.name

    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""
        assert self._resource is not None
        return self._resource.url_for(*args, **kwargs)

    def get_info(self) -> _InfoDict:
        # Introspection data is delegated to the owning resource.
        assert self._resource is not None
        return self._resource.get_info()
+
+
class SystemRoute(AbstractRoute):
    """Synthetic route that raises a fixed HTTP exception when handled."""

    def __init__(self, http_exception: HTTPException) -> None:
        super().__init__(hdrs.METH_ANY, self._handle)
        self._http_exception = http_exception

    @property
    def name(self) -> Optional[str]:
        # System routes are never named.
        return None

    @property
    def status(self) -> int:
        return self._http_exception.status

    @property
    def reason(self) -> str:
        return self._http_exception.reason

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not allowed for SystemRoute")

    def get_info(self) -> _InfoDict:
        return {"http_exception": self._http_exception}

    async def _handle(self, request: Request) -> StreamResponse:
        # Handling simply raises the stored HTTP exception.
        raise self._http_exception

    def __repr__(self) -> str:
        return f"<SystemRoute {self.status}: {self.reason}>"
+
+
class View(AbstractView):
    """Class-based handler dispatching on the request's HTTP method."""

    async def _iter(self) -> StreamResponse:
        if self.request.method not in hdrs.METH_ALL:
            self._raise_allowed_methods()
        method: Optional[Callable[[], Awaitable[StreamResponse]]]
        # Look up a lower-cased method name (e.g. ``get``) on this instance.
        method = getattr(self, self.request.method.lower(), None)
        if method is None:
            self._raise_allowed_methods()
        ret = await method()
        assert isinstance(ret, StreamResponse)
        return ret

    def __await__(self) -> Generator[None, None, StreamResponse]:
        return self._iter().__await__()

    def _raise_allowed_methods(self) -> NoReturn:
        # Advertise only the methods this view actually implements.
        allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
        raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
+
+
class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
    """Read-only view over a router's registered resources."""

    def __init__(self, resources: List[AbstractResource]) -> None:
        self._resources = resources

    def __len__(self) -> int:
        return len(self._resources)

    def __iter__(self) -> Iterator[AbstractResource]:
        return iter(self._resources)

    def __contains__(self, resource: object) -> bool:
        return resource in self._resources
+
+
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
    """Flattened, read-only view of every route across the given resources."""

    def __init__(self, resources: List[AbstractResource]):
        # Flatten each resource's routes, preserving registration order.
        self._routes: List[AbstractRoute] = [
            route for resource in resources for route in resource
        ]

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes)

    def __contains__(self, route: object) -> bool:
        return route in self._routes
+
+
+class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
+
+ NAME_SPLIT_RE = re.compile(r"[.:-]")
+
+ def __init__(self) -> None:
+ super().__init__()
+ self._resources: List[AbstractResource] = []
+ self._named_resources: Dict[str, AbstractResource] = {}
+ self._resource_index: dict[str, list[AbstractResource]] = {}
+ self._matched_sub_app_resources: List[MatchedSubAppResource] = []
+
+ async def resolve(self, request: Request) -> UrlMappingMatchInfo:
+ resource_index = self._resource_index
+ allowed_methods: Set[str] = set()
+
+ # MatchedSubAppResource is primarily used to match on domain names
+ # (though custom rules could match on other things). This means that
+ # the traversal algorithm below can't be applied, and that we likely
+ # need to check these first so a sub app that defines the same path
+ # as a parent app will get priority if there's a domain match.
+ #
+ # For most cases we do not expect there to be many of these since
+ # currently they are only added by `.add_domain()`.
+ for resource in self._matched_sub_app_resources:
+ match_dict, allowed = await resource.resolve(request)
+ if match_dict is not None:
+ return match_dict
+ else:
+ allowed_methods |= allowed
+
+ # Walk the url parts looking for candidates. We walk the url backwards
+ # to ensure the most explicit match is found first. If there are multiple
+ # candidates for a given url part because there are multiple resources
+ # registered for the same canonical path, we resolve them in a linear
+ # fashion to ensure registration order is respected.
+ url_part = request.rel_url.path_safe
+ while url_part:
+ for candidate in resource_index.get(url_part, ()):
+ match_dict, allowed = await candidate.resolve(request)
+ if match_dict is not None:
+ return match_dict
+ else:
+ allowed_methods |= allowed
+ if url_part == "/":
+ break
+ url_part = url_part.rpartition("/")[0] or "/"
+
+ if allowed_methods:
+ return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods))
+
+ return MatchInfoError(HTTPNotFound())
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._named_resources)
+
+ def __len__(self) -> int:
+ return len(self._named_resources)
+
+ def __contains__(self, resource: object) -> bool:
+ return resource in self._named_resources
+
+ def __getitem__(self, name: str) -> AbstractResource:
+ return self._named_resources[name]
+
+ def resources(self) -> ResourcesView:
+ return ResourcesView(self._resources)
+
+ def routes(self) -> RoutesView:
+ return RoutesView(self._resources)
+
+ def named_resources(self) -> Mapping[str, AbstractResource]:
+ return MappingProxyType(self._named_resources)
+
+ def register_resource(self, resource: AbstractResource) -> None:
+ assert isinstance(
+ resource, AbstractResource
+ ), f"Instance of AbstractResource class is required, got {resource!r}"
+ if self.frozen:
+ raise RuntimeError("Cannot register a resource into frozen router.")
+
+ name = resource.name
+
+ if name is not None:
+ parts = self.NAME_SPLIT_RE.split(name)
+ for part in parts:
+ if keyword.iskeyword(part):
+ raise ValueError(
+ f"Incorrect route name {name!r}, "
+ "python keywords cannot be used "
+ "for route name"
+ )
+ if not part.isidentifier():
+ raise ValueError(
+ "Incorrect route name {!r}, "
+ "the name should be a sequence of "
+ "python identifiers separated "
+ "by dash, dot or column".format(name)
+ )
+ if name in self._named_resources:
+ raise ValueError(
+ "Duplicate {!r}, "
+ "already handled by {!r}".format(name, self._named_resources[name])
+ )
+ self._named_resources[name] = resource
+ self._resources.append(resource)
+
+ if isinstance(resource, MatchedSubAppResource):
+ # We cannot index match sub-app resources because they have match rules
+ self._matched_sub_app_resources.append(resource)
+ else:
+ self.index_resource(resource)
+
+ def _get_resource_index_key(self, resource: AbstractResource) -> str:
+ """Return a key to index the resource in the resource index."""
+ if "{" in (index_key := resource.canonical):
+ # strip at the first { to allow for variables, and than
+ # rpartition at / to allow for variable parts in the path
+ # For example if the canonical path is `/core/locations{tail:.*}`
+ # the index key will be `/core` since index is based on the
+ # url parts split by `/`
+ index_key = index_key.partition("{")[0].rpartition("/")[0]
+ return index_key.rstrip("/") or "/"
+
+ def index_resource(self, resource: AbstractResource) -> None:
+ """Add a resource to the resource index."""
+ resource_key = self._get_resource_index_key(resource)
+ # There may be multiple resources for a canonical path
+ # so we keep them in a list to ensure that registration
+ # order is respected.
+ self._resource_index.setdefault(resource_key, []).append(resource)
+
+ def unindex_resource(self, resource: AbstractResource) -> None:
+ """Remove a resource from the resource index."""
+ resource_key = self._get_resource_index_key(resource)
+ self._resource_index[resource_key].remove(resource)
+
+ def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:
+ if path and not path.startswith("/"):
+ raise ValueError("path should be started with / or be empty")
+ # Reuse last added resource if path and name are the same
+ if self._resources:
+ resource = self._resources[-1]
+ if resource.name == name and resource.raw_match(path):
+ return cast(Resource, resource)
+ if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
+ resource = PlainResource(path, name=name)
+ self.register_resource(resource)
+ return resource
+ resource = DynamicResource(path, name=name)
+ self.register_resource(resource)
+ return resource
+
+ def add_route(
+ self,
+ method: str,
+ path: str,
+ handler: Union[Handler, Type[AbstractView]],
+ *,
+ name: Optional[str] = None,
+ expect_handler: Optional[_ExpectHandler] = None,
+ ) -> AbstractRoute:
+ resource = self.add_resource(path, name=name)
+ return resource.add_route(method, handler, expect_handler=expect_handler)
+
+ def add_static(
+ self,
+ prefix: str,
+ path: PathLike,
+ *,
+ name: Optional[str] = None,
+ expect_handler: Optional[_ExpectHandler] = None,
+ chunk_size: int = 256 * 1024,
+ show_index: bool = False,
+ follow_symlinks: bool = False,
+ append_version: bool = False,
+ ) -> AbstractResource:
+ """Add static files view.
+
+ prefix - url prefix
+ path - folder with files
+
+ """
+ assert prefix.startswith("/")
+ if prefix.endswith("/"):
+ prefix = prefix[:-1]
+ resource = StaticResource(
+ prefix,
+ path,
+ name=name,
+ expect_handler=expect_handler,
+ chunk_size=chunk_size,
+ show_index=show_index,
+ follow_symlinks=follow_symlinks,
+ append_version=append_version,
+ )
+ self.register_resource(resource)
+ return resource
+
+ def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method HEAD."""
+ return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
+
+ def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method OPTIONS."""
+ return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
+
+ def add_get(
+ self,
+ path: str,
+ handler: Handler,
+ *,
+ name: Optional[str] = None,
+ allow_head: bool = True,
+ **kwargs: Any,
+ ) -> AbstractRoute:
+ """Shortcut for add_route with method GET.
+
+ If allow_head is true, another
+ route is added allowing head requests to the same endpoint.
+ """
+ resource = self.add_resource(path, name=name)
+ if allow_head:
+ resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
+ return resource.add_route(hdrs.METH_GET, handler, **kwargs)
+
+ def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method POST."""
+ return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
+
+ def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method PUT."""
+ return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
+
+ def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method PATCH."""
+ return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
+
+ def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method DELETE."""
+ return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
+
+ def add_view(
+ self, path: str, handler: Type[AbstractView], **kwargs: Any
+ ) -> AbstractRoute:
+ """Shortcut for add_route with ANY methods for a class-based view."""
+ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
+
+ def freeze(self) -> None:
+ super().freeze()
+ for resource in self._resources:
+ resource.freeze()
+
+ def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
+ """Append routes to route table.
+
+ Parameter should be a sequence of RouteDef objects.
+
+ Returns a list of registered AbstractRoute instances.
+ """
+ registered_routes = []
+ for route_def in routes:
+ registered_routes.extend(route_def.register(self))
+ return registered_routes
+
+
+def _quote_path(value: str) -> str:
+ if YARL_VERSION < (1, 6):
+ value = value.replace("%", "%25")
+ return URL.build(path=value, encoded=False).raw_path
+
+
+def _unquote_path_safe(value: str) -> str:
+ if "%" not in value:
+ return value
+ return value.replace("%2F", "/").replace("%25", "%")
+
+
+def _requote_path(value: str) -> str:
+ # Quote non-ascii characters and other characters which must be quoted,
+ # but preserve existing %-sequences.
+ result = _quote_path(value)
+ if "%" in value:
+ result = result.replace("%25", "%")
+ return result
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_ws.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_ws.py"
new file mode 100644
index 0000000..575f9a3
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/web_ws.py"
@@ -0,0 +1,631 @@
+import asyncio
+import base64
+import binascii
+import hashlib
+import json
+import sys
+from typing import Any, Final, Iterable, Optional, Tuple, Union, cast
+
+import attr
+from multidict import CIMultiDict
+
+from . import hdrs
+from ._websocket.reader import WebSocketDataQueue
+from ._websocket.writer import DEFAULT_LIMIT
+from .abc import AbstractStreamWriter
+from .client_exceptions import WSMessageTypeError
+from .helpers import calculate_timeout_when, set_exception, set_result
+from .http import (
+ WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE,
+ WS_KEY,
+ WebSocketError,
+ WebSocketReader,
+ WebSocketWriter,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType as WSMsgType,
+ ws_ext_gen,
+ ws_ext_parse,
+)
+from .http_websocket import _INTERNAL_RECEIVE_TYPES
+from .log import ws_logger
+from .streams import EofStream
+from .typedefs import JSONDecoder, JSONEncoder
+from .web_exceptions import HTTPBadRequest, HTTPException
+from .web_request import BaseRequest
+from .web_response import StreamResponse
+
+if sys.version_info >= (3, 11):
+ import asyncio as async_timeout
+else:
+ import async_timeout
+
+__all__ = (
+ "WebSocketResponse",
+ "WebSocketReady",
+ "WSMsgType",
+)
+
+THRESHOLD_CONNLOST_ACCESS: Final[int] = 5
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class WebSocketReady:
+ ok: bool
+ protocol: Optional[str]
+
+ def __bool__(self) -> bool:
+ return self.ok
+
+
+class WebSocketResponse(StreamResponse):
+
+ _length_check: bool = False
+ _ws_protocol: Optional[str] = None
+ _writer: Optional[WebSocketWriter] = None
+ _reader: Optional[WebSocketDataQueue] = None
+ _closed: bool = False
+ _closing: bool = False
+ _conn_lost: int = 0
+ _close_code: Optional[int] = None
+ _loop: Optional[asyncio.AbstractEventLoop] = None
+ _waiting: bool = False
+ _close_wait: Optional[asyncio.Future[None]] = None
+ _exception: Optional[BaseException] = None
+ _heartbeat_when: float = 0.0
+ _heartbeat_cb: Optional[asyncio.TimerHandle] = None
+ _pong_response_cb: Optional[asyncio.TimerHandle] = None
+ _ping_task: Optional[asyncio.Task[None]] = None
+
+ def __init__(
+ self,
+ *,
+ timeout: float = 10.0,
+ receive_timeout: Optional[float] = None,
+ autoclose: bool = True,
+ autoping: bool = True,
+ heartbeat: Optional[float] = None,
+ protocols: Iterable[str] = (),
+ compress: bool = True,
+ max_msg_size: int = 4 * 1024 * 1024,
+ writer_limit: int = DEFAULT_LIMIT,
+ ) -> None:
+ super().__init__(status=101)
+ self._protocols = protocols
+ self._timeout = timeout
+ self._receive_timeout = receive_timeout
+ self._autoclose = autoclose
+ self._autoping = autoping
+ self._heartbeat = heartbeat
+ if heartbeat is not None:
+ self._pong_heartbeat = heartbeat / 2.0
+ self._compress: Union[bool, int] = compress
+ self._max_msg_size = max_msg_size
+ self._writer_limit = writer_limit
+
+ def _cancel_heartbeat(self) -> None:
+ self._cancel_pong_response_cb()
+ if self._heartbeat_cb is not None:
+ self._heartbeat_cb.cancel()
+ self._heartbeat_cb = None
+ if self._ping_task is not None:
+ self._ping_task.cancel()
+ self._ping_task = None
+
+ def _cancel_pong_response_cb(self) -> None:
+ if self._pong_response_cb is not None:
+ self._pong_response_cb.cancel()
+ self._pong_response_cb = None
+
+ def _reset_heartbeat(self) -> None:
+ if self._heartbeat is None:
+ return
+ self._cancel_pong_response_cb()
+ req = self._req
+ timeout_ceil_threshold = (
+ req._protocol._timeout_ceil_threshold if req is not None else 5
+ )
+ loop = self._loop
+ assert loop is not None
+ now = loop.time()
+ when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold)
+ self._heartbeat_when = when
+ if self._heartbeat_cb is None:
+ # We do not cancel the previous heartbeat_cb here because
+ # it generates a significant amount of TimerHandle churn
+ # which causes asyncio to rebuild the heap frequently.
+ # Instead _send_heartbeat() will reschedule the next
+ # heartbeat if it fires too early.
+ self._heartbeat_cb = loop.call_at(when, self._send_heartbeat)
+
+ def _send_heartbeat(self) -> None:
+ self._heartbeat_cb = None
+ loop = self._loop
+ assert loop is not None and self._writer is not None
+ now = loop.time()
+ if now < self._heartbeat_when:
+ # Heartbeat fired too early, reschedule
+ self._heartbeat_cb = loop.call_at(
+ self._heartbeat_when, self._send_heartbeat
+ )
+ return
+
+ req = self._req
+ timeout_ceil_threshold = (
+ req._protocol._timeout_ceil_threshold if req is not None else 5
+ )
+ when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold)
+ self._cancel_pong_response_cb()
+ self._pong_response_cb = loop.call_at(when, self._pong_not_received)
+
+ coro = self._writer.send_frame(b"", WSMsgType.PING)
+ if sys.version_info >= (3, 12):
+ # Optimization for Python 3.12, try to send the ping
+ # immediately to avoid having to schedule
+ # the task on the event loop.
+ ping_task = asyncio.Task(coro, loop=loop, eager_start=True)
+ else:
+ ping_task = loop.create_task(coro)
+
+ if not ping_task.done():
+ self._ping_task = ping_task
+ ping_task.add_done_callback(self._ping_task_done)
+ else:
+ self._ping_task_done(ping_task)
+
+ def _ping_task_done(self, task: "asyncio.Task[None]") -> None:
+ """Callback for when the ping task completes."""
+ if not task.cancelled() and (exc := task.exception()):
+ self._handle_ping_pong_exception(exc)
+ self._ping_task = None
+
+ def _pong_not_received(self) -> None:
+ if self._req is not None and self._req.transport is not None:
+ self._handle_ping_pong_exception(
+ asyncio.TimeoutError(
+ f"No PONG received after {self._pong_heartbeat} seconds"
+ )
+ )
+
+ def _handle_ping_pong_exception(self, exc: BaseException) -> None:
+ """Handle exceptions raised during ping/pong processing."""
+ if self._closed:
+ return
+ self._set_closed()
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ self._exception = exc
+ if self._waiting and not self._closing and self._reader is not None:
+ self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0)
+
+ def _set_closed(self) -> None:
+ """Set the connection to closed.
+
+ Cancel any heartbeat timers and set the closed flag.
+ """
+ self._closed = True
+ self._cancel_heartbeat()
+
+ async def prepare(self, request: BaseRequest) -> AbstractStreamWriter:
+ # make pre-check to don't hide it by do_handshake() exceptions
+ if self._payload_writer is not None:
+ return self._payload_writer
+
+ protocol, writer = self._pre_start(request)
+ payload_writer = await super().prepare(request)
+ assert payload_writer is not None
+ self._post_start(request, protocol, writer)
+ await payload_writer.drain()
+ return payload_writer
+
+ def _handshake(
+ self, request: BaseRequest
+ ) -> Tuple["CIMultiDict[str]", Optional[str], int, bool]:
+ headers = request.headers
+ if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip():
+ raise HTTPBadRequest(
+ text=(
+ "No WebSocket UPGRADE hdr: {}\n Can "
+ '"Upgrade" only to "WebSocket".'
+ ).format(headers.get(hdrs.UPGRADE))
+ )
+
+ if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():
+ raise HTTPBadRequest(
+ text="No CONNECTION upgrade hdr: {}".format(
+ headers.get(hdrs.CONNECTION)
+ )
+ )
+
+ # find common sub-protocol between client and server
+ protocol: Optional[str] = None
+ if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:
+ req_protocols = [
+ str(proto.strip())
+ for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
+ ]
+
+ for proto in req_protocols:
+ if proto in self._protocols:
+ protocol = proto
+ break
+ else:
+ # No overlap found: Return no protocol as per spec
+ ws_logger.warning(
+ "%s: Client protocols %r don’t overlap server-known ones %r",
+ request.remote,
+ req_protocols,
+ self._protocols,
+ )
+
+ # check supported version
+ version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")
+ if version not in ("13", "8", "7"):
+ raise HTTPBadRequest(text=f"Unsupported version: {version}")
+
+ # check client handshake for validity
+ key = headers.get(hdrs.SEC_WEBSOCKET_KEY)
+ try:
+ if not key or len(base64.b64decode(key)) != 16:
+ raise HTTPBadRequest(text=f"Handshake error: {key!r}")
+ except binascii.Error:
+ raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None
+
+ accept_val = base64.b64encode(
+ hashlib.sha1(key.encode() + WS_KEY).digest()
+ ).decode()
+ response_headers = CIMultiDict(
+ {
+ hdrs.UPGRADE: "websocket",
+ hdrs.CONNECTION: "upgrade",
+ hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
+ }
+ )
+
+ notakeover = False
+ compress = 0
+ if self._compress:
+ extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
+ # Server side always get return with no exception.
+ # If something happened, just drop compress extension
+ compress, notakeover = ws_ext_parse(extensions, isserver=True)
+ if compress:
+ enabledext = ws_ext_gen(
+ compress=compress, isserver=True, server_notakeover=notakeover
+ )
+ response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext
+
+ if protocol:
+ response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
+ return (
+ response_headers,
+ protocol,
+ compress,
+ notakeover,
+ )
+
+ def _pre_start(self, request: BaseRequest) -> Tuple[Optional[str], WebSocketWriter]:
+ self._loop = request._loop
+
+ headers, protocol, compress, notakeover = self._handshake(request)
+
+ self.set_status(101)
+ self.headers.update(headers)
+ self.force_close()
+ self._compress = compress
+ transport = request._protocol.transport
+ assert transport is not None
+ writer = WebSocketWriter(
+ request._protocol,
+ transport,
+ compress=compress,
+ notakeover=notakeover,
+ limit=self._writer_limit,
+ )
+
+ return protocol, writer
+
+ def _post_start(
+ self, request: BaseRequest, protocol: Optional[str], writer: WebSocketWriter
+ ) -> None:
+ self._ws_protocol = protocol
+ self._writer = writer
+
+ self._reset_heartbeat()
+
+ loop = self._loop
+ assert loop is not None
+ self._reader = WebSocketDataQueue(request._protocol, 2**16, loop=loop)
+ request.protocol.set_parser(
+ WebSocketReader(
+ self._reader, self._max_msg_size, compress=bool(self._compress)
+ )
+ )
+ # disable HTTP keepalive for WebSocket
+ request.protocol.keep_alive(False)
+
+ def can_prepare(self, request: BaseRequest) -> WebSocketReady:
+ if self._writer is not None:
+ raise RuntimeError("Already started")
+ try:
+ _, protocol, _, _ = self._handshake(request)
+ except HTTPException:
+ return WebSocketReady(False, None)
+ else:
+ return WebSocketReady(True, protocol)
+
+ @property
+ def prepared(self) -> bool:
+ return self._writer is not None
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ @property
+ def close_code(self) -> Optional[int]:
+ return self._close_code
+
+ @property
+ def ws_protocol(self) -> Optional[str]:
+ return self._ws_protocol
+
+ @property
+ def compress(self) -> Union[int, bool]:
+ return self._compress
+
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
+ """Get optional transport information.
+
+ If no value associated with ``name`` is found, ``default`` is returned.
+ """
+ writer = self._writer
+ if writer is None:
+ return default
+ transport = writer.transport
+ if transport is None:
+ return default
+ return transport.get_extra_info(name, default)
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ async def ping(self, message: bytes = b"") -> None:
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ await self._writer.send_frame(message, WSMsgType.PING)
+
+ async def pong(self, message: bytes = b"") -> None:
+ # unsolicited pong
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ await self._writer.send_frame(message, WSMsgType.PONG)
+
+ async def send_frame(
+ self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None
+ ) -> None:
+ """Send a frame over the websocket."""
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ await self._writer.send_frame(message, opcode, compress)
+
+ async def send_str(self, data: str, compress: Optional[int] = None) -> None:
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ if not isinstance(data, str):
+ raise TypeError("data argument must be str (%r)" % type(data))
+ await self._writer.send_frame(
+ data.encode("utf-8"), WSMsgType.TEXT, compress=compress
+ )
+
+ async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ if not isinstance(data, (bytes, bytearray, memoryview)):
+ raise TypeError("data argument must be byte-ish (%r)" % type(data))
+ await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress)
+
+ async def send_json(
+ self,
+ data: Any,
+ compress: Optional[int] = None,
+ *,
+ dumps: JSONEncoder = json.dumps,
+ ) -> None:
+ await self.send_str(dumps(data), compress=compress)
+
+ async def write_eof(self) -> None: # type: ignore[override]
+ if self._eof_sent:
+ return
+ if self._payload_writer is None:
+ raise RuntimeError("Response has not been started")
+
+ await self.close()
+ self._eof_sent = True
+
+ async def close(
+ self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True
+ ) -> bool:
+ """Close websocket connection."""
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+
+ if self._closed:
+ return False
+ self._set_closed()
+
+ try:
+ await self._writer.close(code, message)
+ writer = self._payload_writer
+ assert writer is not None
+ if drain:
+ await writer.drain()
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ raise
+ except Exception as exc:
+ self._exception = exc
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ return True
+
+ reader = self._reader
+ assert reader is not None
+ # we need to break `receive()` cycle before we can call
+ # `reader.read()` as `close()` may be called from different task
+ if self._waiting:
+ assert self._loop is not None
+ assert self._close_wait is None
+ self._close_wait = self._loop.create_future()
+ reader.feed_data(WS_CLOSING_MESSAGE, 0)
+ await self._close_wait
+
+ if self._closing:
+ self._close_transport()
+ return True
+
+ try:
+ async with async_timeout.timeout(self._timeout):
+ while True:
+ msg = await reader.read()
+ if msg.type is WSMsgType.CLOSE:
+ self._set_code_close_transport(msg.data)
+ return True
+ except asyncio.CancelledError:
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ raise
+ except Exception as exc:
+ self._exception = exc
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ return True
+
+ def _set_closing(self, code: WSCloseCode) -> None:
+ """Set the close code and mark the connection as closing."""
+ self._closing = True
+ self._close_code = code
+ self._cancel_heartbeat()
+
+ def _set_code_close_transport(self, code: WSCloseCode) -> None:
+ """Set the close code and close the transport."""
+ self._close_code = code
+ self._close_transport()
+
+ def _close_transport(self) -> None:
+ """Close the transport."""
+ if self._req is not None and self._req.transport is not None:
+ self._req.transport.close()
+
+ async def receive(self, timeout: Optional[float] = None) -> WSMessage:
+ if self._reader is None:
+ raise RuntimeError("Call .prepare() first")
+
+ receive_timeout = timeout or self._receive_timeout
+ while True:
+ if self._waiting:
+ raise RuntimeError("Concurrent call to receive() is not allowed")
+
+ if self._closed:
+ self._conn_lost += 1
+ if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
+ raise RuntimeError("WebSocket connection is closed.")
+ return WS_CLOSED_MESSAGE
+ elif self._closing:
+ return WS_CLOSING_MESSAGE
+
+ try:
+ self._waiting = True
+ try:
+ if receive_timeout:
+ # Entering the context manager and creating
+ # Timeout() object can take almost 50% of the
+ # run time in this loop so we avoid it if
+ # there is no read timeout.
+ async with async_timeout.timeout(receive_timeout):
+ msg = await self._reader.read()
+ else:
+ msg = await self._reader.read()
+ self._reset_heartbeat()
+ finally:
+ self._waiting = False
+ if self._close_wait:
+ set_result(self._close_wait, None)
+ except asyncio.TimeoutError:
+ raise
+ except EofStream:
+ self._close_code = WSCloseCode.OK
+ await self.close()
+ return WSMessage(WSMsgType.CLOSED, None, None)
+ except WebSocketError as exc:
+ self._close_code = exc.code
+ await self.close(code=exc.code)
+ return WSMessage(WSMsgType.ERROR, exc, None)
+ except Exception as exc:
+ self._exception = exc
+ self._set_closing(WSCloseCode.ABNORMAL_CLOSURE)
+ await self.close()
+ return WSMessage(WSMsgType.ERROR, exc, None)
+
+ if msg.type not in _INTERNAL_RECEIVE_TYPES:
+ # If its not a close/closing/ping/pong message
+ # we can return it immediately
+ return msg
+
+ if msg.type is WSMsgType.CLOSE:
+ self._set_closing(msg.data)
+ # Could be closed while awaiting reader.
+ if not self._closed and self._autoclose:
+ # The client is likely going to close the
+ # connection out from under us so we do not
+ # want to drain any pending writes as it will
+ # likely result writing to a broken pipe.
+ await self.close(drain=False)
+ elif msg.type is WSMsgType.CLOSING:
+ self._set_closing(WSCloseCode.OK)
+ elif msg.type is WSMsgType.PING and self._autoping:
+ await self.pong(msg.data)
+ continue
+ elif msg.type is WSMsgType.PONG and self._autoping:
+ continue
+
+ return msg
+
+ async def receive_str(self, *, timeout: Optional[float] = None) -> str:
+ msg = await self.receive(timeout)
+ if msg.type is not WSMsgType.TEXT:
+ raise WSMessageTypeError(
+ f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT"
+ )
+ return cast(str, msg.data)
+
+ async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
+ msg = await self.receive(timeout)
+ if msg.type is not WSMsgType.BINARY:
+ raise WSMessageTypeError(
+ f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY"
+ )
+ return cast(bytes, msg.data)
+
+ async def receive_json(
+ self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None
+ ) -> Any:
+ data = await self.receive_str(timeout=timeout)
+ return loads(data)
+
+ async def write(self, data: bytes) -> None:
+ raise RuntimeError("Cannot call .write() for websocket")
+
+ def __aiter__(self) -> "WebSocketResponse":
+ return self
+
+ async def __anext__(self) -> WSMessage:
+ msg = await self.receive()
+ if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
+ raise StopAsyncIteration
+ return msg
+
+ def _cancel(self, exc: BaseException) -> None:
+ # web_protocol calls this from connection_lost
+ # or when the server is shutting down.
+ self._closing = True
+ self._cancel_heartbeat()
+ if self._reader is not None:
+ set_exception(self._reader, exc)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/worker.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/worker.py"
new file mode 100644
index 0000000..f7281bf
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiohttp/worker.py"
@@ -0,0 +1,255 @@
+"""Async gunicorn worker for aiohttp.web"""
+
+import asyncio
+import inspect
+import os
+import re
+import signal
+import sys
+from types import FrameType
+from typing import TYPE_CHECKING, Any, Optional
+
+from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
+from gunicorn.workers import base
+
+from aiohttp import web
+
+from .helpers import set_result
+from .web_app import Application
+from .web_log import AccessLogger
+
+if TYPE_CHECKING:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+else:
+ try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+ except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
+
+
+class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
+
+ DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
+ DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
+
+ def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover
+ super().__init__(*args, **kw)
+
+ self._task: Optional[asyncio.Task[None]] = None
+ self.exit_code = 0
+ self._notify_waiter: Optional[asyncio.Future[bool]] = None
+
+ def init_process(self) -> None:
+ # create new event_loop after fork
+ asyncio.get_event_loop().close()
+
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
+ super().init_process()
+
+ def run(self) -> None:
+ self._task = self.loop.create_task(self._run())
+
+ try: # ignore all finalization problems
+ self.loop.run_until_complete(self._task)
+ except Exception:
+ self.log.exception("Exception in gunicorn worker")
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+
+ sys.exit(self.exit_code)
+
+ async def _run(self) -> None:
+ runner = None
+ if isinstance(self.wsgi, Application):
+ app = self.wsgi
+ elif inspect.iscoroutinefunction(self.wsgi) or (
+ sys.version_info < (3, 14) and asyncio.iscoroutinefunction(self.wsgi)
+ ):
+ wsgi = await self.wsgi()
+ if isinstance(wsgi, web.AppRunner):
+ runner = wsgi
+ app = runner.app
+ else:
+ app = wsgi
+ else:
+ raise RuntimeError(
+ "wsgi app should be either Application or "
+ "async function returning Application, got {}".format(self.wsgi)
+ )
+
+ if runner is None:
+ access_log = self.log.access_log if self.cfg.accesslog else None
+ runner = web.AppRunner(
+ app,
+ logger=self.log,
+ keepalive_timeout=self.cfg.keepalive,
+ access_log=access_log,
+ access_log_format=self._get_valid_log_format(
+ self.cfg.access_log_format
+ ),
+ shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
+ )
+ await runner.setup()
+
+ ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
+
+ runner = runner
+ assert runner is not None
+ server = runner.server
+ assert server is not None
+ for sock in self.sockets:
+ site = web.SockSite(
+ runner,
+ sock,
+ ssl_context=ctx,
+ )
+ await site.start()
+
+ # If our parent changed then we shut down.
+ pid = os.getpid()
+ try:
+ while self.alive: # type: ignore[has-type]
+ self.notify()
+
+ cnt = server.requests_count
+ if self.max_requests and cnt > self.max_requests:
+ self.alive = False
+ self.log.info("Max requests, shutting down: %s", self)
+
+ elif pid == os.getpid() and self.ppid != os.getppid():
+ self.alive = False
+ self.log.info("Parent changed, shutting down: %s", self)
+ else:
+ await self._wait_next_notify()
+ except BaseException:
+ pass
+
+ await runner.cleanup()
+
+ def _wait_next_notify(self) -> "asyncio.Future[bool]":
+ self._notify_waiter_done()
+
+ loop = self.loop
+ assert loop is not None
+ self._notify_waiter = waiter = loop.create_future()
+ self.loop.call_later(1.0, self._notify_waiter_done, waiter)
+
+ return waiter
+
+ def _notify_waiter_done(
+ self, waiter: Optional["asyncio.Future[bool]"] = None
+ ) -> None:
+ if waiter is None:
+ waiter = self._notify_waiter
+ if waiter is not None:
+ set_result(waiter, True)
+
+ if waiter is self._notify_waiter:
+ self._notify_waiter = None
+
+ def init_signals(self) -> None:
+ # Set up signals through the event loop API.
+
+ self.loop.add_signal_handler(
+ signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGINT, self.handle_quit, signal.SIGINT, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
+ )
+
+ # Don't let SIGTERM and SIGUSR1 disturb active requests
+ # by interrupting system calls
+ signal.siginterrupt(signal.SIGTERM, False)
+ signal.siginterrupt(signal.SIGUSR1, False)
+ # Reset signals so Gunicorn doesn't swallow subprocess return codes
+ # See: https://github.com/aio-libs/aiohttp/issues/6130
+
+ def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
+ self.alive = False
+
+ # worker_int callback
+ self.cfg.worker_int(self)
+
+ # wakeup closing process
+ self._notify_waiter_done()
+
+ def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
+ self.alive = False
+ self.exit_code = 1
+ self.cfg.worker_abort(self)
+ sys.exit(1)
+
+ @staticmethod
+ def _create_ssl_context(cfg: Any) -> "SSLContext":
+ """Creates SSLContext instance for usage in asyncio.create_server.
+
+ See ssl.SSLSocket.__init__ for more details.
+ """
+ if ssl is None: # pragma: no cover
+ raise RuntimeError("SSL is not supported.")
+
+ ctx = ssl.SSLContext(cfg.ssl_version)
+ ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
+ ctx.verify_mode = cfg.cert_reqs
+ if cfg.ca_certs:
+ ctx.load_verify_locations(cfg.ca_certs)
+ if cfg.ciphers:
+ ctx.set_ciphers(cfg.ciphers)
+ return ctx
+
+ def _get_valid_log_format(self, source_format: str) -> str:
+ if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
+ return self.DEFAULT_AIOHTTP_LOG_FORMAT
+ elif re.search(r"%\([^\)]+\)", source_format):
+ raise ValueError(
+ "Gunicorn's style options in form of `%(name)s` are not "
+ "supported for the log formatting. Please use aiohttp's "
+ "format specification to configure access log formatting: "
+ "http://docs.aiohttp.org/en/stable/logging.html"
+ "#format-specification"
+ )
+ else:
+ return source_format
+
+
+class GunicornUVLoopWebWorker(GunicornWebWorker):
+ def init_process(self) -> None:
+ import uvloop
+
+ # Close any existing event loop before setting a
+ # new policy.
+ asyncio.get_event_loop().close()
+
+ # Setup uvloop policy, so that every
+ # asyncio.get_event_loop() will create an instance
+ # of uvloop event loop.
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+
+ super().init_process()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/METADATA"
new file mode 100644
index 0000000..03a6f0f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/METADATA"
@@ -0,0 +1,112 @@
+Metadata-Version: 2.4
+Name: aiosignal
+Version: 1.4.0
+Summary: aiosignal: a list of registered asynchronous callbacks
+Home-page: https://github.com/aio-libs/aiosignal
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache 2.0
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal
+Project-URL: Docs: RTD, https://docs.aiosignal.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Framework :: AsyncIO
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: frozenlist>=1.1.0
+Requires-Dist: typing-extensions>=4.2; python_version < "3.13"
+Dynamic: license-file
+
+=========
+aiosignal
+=========
+
+.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg?flag=pytest
+ :target: https://codecov.io/gh/aio-libs/aiosignal?flags[0]=pytest
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiosignal.svg
+ :target: https://pypi.org/project/aiosignal
+ :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest
+ :target: https://aiosignal.readthedocs.io/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F
+ :target: https://aio-libs.discourse.group/
+ :alt: Discourse group for io-libs
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+Introduction
+============
+
+A project to manage callbacks in `asyncio` projects.
+
+``Signal`` is a list of registered asynchronous callbacks.
+
+The signal's life-cycle has two stages: after creation its content
+could be filled by using standard list operations: ``sig.append()``
+etc.
+
+After you call ``sig.freeze()`` the signal is *frozen*: adding, removing
+and dropping callbacks is forbidden.
+
+The only available operation is calling the previously registered
+callbacks by using ``await sig.send(data)``.
+
+For concrete usage examples see the `Signals
+<https://docs.aiohttp.org/en/stable/web_advanced.html#aiohttp-web-signals>
+section of the `Web Server Advanced
+<https://docs.aiohttp.org/en/stable/web_advanced.html>` chapter of the `aiohttp
+documentation`_.
+
+
+Installation
+------------
+
+::
+
+ $ pip install aiosignal
+
+
+Documentation
+=============
+
+https://aiosignal.readthedocs.io/
+
+License
+=======
+
+``aiosignal`` is offered under the Apache 2 license.
+
+Source code
+===========
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/aiosignal/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/aiosignal
+.. _aiohttp documentation: https://docs.aiohttp.org/
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/RECORD"
new file mode 100644
index 0000000..11bc848
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/RECORD"
@@ -0,0 +1,9 @@
+aiosignal-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiosignal-1.4.0.dist-info/METADATA,sha256=CSR-8dqLxpZyjUcTDnAuQwf299EB1sSFv_nzpxznAI0,3662
+aiosignal-1.4.0.dist-info/RECORD,,
+aiosignal-1.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aiosignal-1.4.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
+aiosignal-1.4.0.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10
+aiosignal/__init__.py,sha256=TIkmUG9HTBt4dfq2nISYBiZiRB2xwvFtEZydLP0HPL4,1537
+aiosignal/__pycache__/__init__.cpython-312.pyc,,
+aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/WHEEL"
new file mode 100644
index 0000000..e7fa31b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..7082a2d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/licenses/LICENSE"
@@ -0,0 +1,201 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/top_level.txt"
new file mode 100644
index 0000000..ac6df3a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal-1.4.0.dist-info/top_level.txt"
@@ -0,0 +1 @@
+aiosignal
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal/__init__.py"
new file mode 100644
index 0000000..5ede009
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal/__init__.py"
@@ -0,0 +1,59 @@
+import sys
+from typing import Any, Awaitable, Callable, TypeVar
+
+from frozenlist import FrozenList
+
+if sys.version_info >= (3, 11):
+ from typing import Unpack
+else:
+ from typing_extensions import Unpack
+
+if sys.version_info >= (3, 13):
+ from typing import TypeVarTuple
+else:
+ from typing_extensions import TypeVarTuple
+
+_T = TypeVar("_T")
+_Ts = TypeVarTuple("_Ts", default=Unpack[tuple[()]])
+
+__version__ = "1.4.0"
+
+__all__ = ("Signal",)
+
+
+class Signal(FrozenList[Callable[[Unpack[_Ts]], Awaitable[object]]]):
+ """Coroutine-based signal implementation.
+
+ To connect a callback to a signal, use any list method.
+
+ Signals are fired using the send() coroutine, which takes named
+ arguments.
+ """
+
+ __slots__ = ("_owner",)
+
+ def __init__(self, owner: object):
+ super().__init__()
+ self._owner = owner
+
+ def __repr__(self) -> str:
+ return "<Signal owner={}, frozen={}, {!r}>".format(
+ self._owner, self.frozen, list(self)
+ )
+
+ async def send(self, *args: Unpack[_Ts], **kwargs: Any) -> None:
+ """
+ Sends data to all registered receivers.
+ """
+ if not self.frozen:
+ raise RuntimeError("Cannot send non-frozen signal.")
+
+ for receiver in self:
+ await receiver(*args, **kwargs)
+
+ def __call__(
+ self, func: Callable[[Unpack[_Ts]], Awaitable[_T]]
+ ) -> Callable[[Unpack[_Ts]], Awaitable[_T]]:
+ """Decorator to add a function to this Signal."""
+ self.append(func)
+ return func
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal/py.typed"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/aiosignal/py.typed"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/__init__.py"
new file mode 100644
index 0000000..5c6e065
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/__init__.py"
@@ -0,0 +1,104 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Classes Without Boilerplate
+"""
+
+from functools import partial
+from typing import Callable, Literal, Protocol
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Converter,
+ Factory,
+ _Nothing,
+ attrib,
+ attrs,
+ evolve,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._next_gen import define, field, frozen, mutable
+from ._version_info import VersionInfo
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
+
+
+class AttrsInstance(Protocol):
+ pass
+
+
+NothingType = Literal[_Nothing.NOTHING]
+
+__all__ = [
+ "NOTHING",
+ "Attribute",
+ "AttrsInstance",
+ "Converter",
+ "Factory",
+ "NothingType",
+ "asdict",
+ "assoc",
+ "astuple",
+ "attr",
+ "attrib",
+ "attributes",
+ "attrs",
+ "cmp_using",
+ "converters",
+ "define",
+ "evolve",
+ "exceptions",
+ "field",
+ "fields",
+ "fields_dict",
+ "filters",
+ "frozen",
+ "get_run_validators",
+ "has",
+ "ib",
+ "make_class",
+ "mutable",
+ "resolve_types",
+ "s",
+ "set_run_validators",
+ "setters",
+ "validate",
+ "validators",
+]
+
+
+def _make_getattr(mod_name: str) -> Callable:
+ """
+ Create a metadata proxy for packaging information that uses *mod_name* in
+ its warnings and errors.
+ """
+
+ def __getattr__(name: str) -> str:
+ if name not in ("__version__", "__version_info__"):
+ msg = f"module {mod_name} has no attribute {name}"
+ raise AttributeError(msg)
+
+ from importlib.metadata import metadata
+
+ meta = metadata("attrs")
+
+ if name == "__version_info__":
+ return VersionInfo._from_version_string(meta["version"])
+
+ return meta["version"]
+
+ return __getattr__
+
+
+__getattr__ = _make_getattr(__name__)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/__init__.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/__init__.pyi"
new file mode 100644
index 0000000..8d78fa1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/__init__.pyi"
@@ -0,0 +1,389 @@
+import enum
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Generic,
+ Literal,
+ Mapping,
+ Protocol,
+ Sequence,
+ TypeVar,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._cmp import cmp_using as cmp_using
+from ._typing_compat import AttrsInstance_
+from ._version_info import VersionInfo
+from attrs import (
+ define as define,
+ field as field,
+ mutable as mutable,
+ frozen as frozen,
+ _EqOrderType,
+ _ValidatorType,
+ _ConverterType,
+ _ReprArgType,
+ _OnSetAttrType,
+ _OnSetAttrArgType,
+ _FieldTransformer,
+ _ValidatorArgType,
+)
+
+if sys.version_info >= (3, 10):
+ from typing import TypeGuard, TypeAlias
+else:
+ from typing_extensions import TypeGuard, TypeAlias
+
+if sys.version_info >= (3, 11):
+ from typing import dataclass_transform
+else:
+ from typing_extensions import dataclass_transform
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_FilterType = Callable[["Attribute[_T]", _T], bool]
+
+# We subclass this here to keep the protocol's qualified name clean.
+class AttrsInstance(AttrsInstance_, Protocol):
+ pass
+
+_A = TypeVar("_A", bound=type[AttrsInstance])
+
+class _Nothing(enum.Enum):
+ NOTHING = enum.auto()
+
+NOTHING = _Nothing.NOTHING
+NothingType: TypeAlias = Literal[_Nothing.NOTHING]
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+
+@overload
+def Factory(factory: Callable[[], _T]) -> _T: ...
+@overload
+def Factory(
+ factory: Callable[[Any], _T],
+ takes_self: Literal[True],
+) -> _T: ...
+@overload
+def Factory(
+ factory: Callable[[], _T],
+ takes_self: Literal[False],
+) -> _T: ...
+
+In = TypeVar("In")
+Out = TypeVar("Out")
+
+class Converter(Generic[In, Out]):
+ @overload
+ def __init__(self, converter: Callable[[In], Out]) -> None: ...
+ @overload
+ def __init__(
+ self,
+ converter: Callable[[In, AttrsInstance, Attribute], Out],
+ *,
+ takes_self: Literal[True],
+ takes_field: Literal[True],
+ ) -> None: ...
+ @overload
+ def __init__(
+ self,
+ converter: Callable[[In, Attribute], Out],
+ *,
+ takes_field: Literal[True],
+ ) -> None: ...
+ @overload
+ def __init__(
+ self,
+ converter: Callable[[In, AttrsInstance], Out],
+ *,
+ takes_self: Literal[True],
+ ) -> None: ...
+
+class Attribute(Generic[_T]):
+ name: str
+ default: _T | None
+ validator: _ValidatorType[_T] | None
+ repr: _ReprArgType
+ cmp: _EqOrderType
+ eq: _EqOrderType
+ order: _EqOrderType
+ hash: bool | None
+ init: bool
+ converter: Converter | None
+ metadata: dict[Any, Any]
+ type: type[_T] | None
+ kw_only: bool
+ on_setattr: _OnSetAttrType
+ alias: str | None
+
+ def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=<some callable>) -> Whatever the callable expects.
+# This makes this type of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: type[_T] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: type[_T] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: _T | None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: object = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> Any: ...
+@overload
+@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
+def attrs(
+ maybe_cls: _C,
+ these: dict[str, Any] | None = ...,
+ repr_ns: str | None = ...,
+ repr: bool = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+ unsafe_hash: bool | None = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
+def attrs(
+ maybe_cls: None = ...,
+ these: dict[str, Any] | None = ...,
+ repr_ns: str | None = ...,
+ repr: bool = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+ unsafe_hash: bool | None = ...,
+) -> Callable[[_C], _C]: ...
+def fields(cls: type[AttrsInstance]) -> Any: ...
+def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
+def validate(inst: AttrsInstance) -> None: ...
+def resolve_types(
+ cls: _A,
+ globalns: dict[str, Any] | None = ...,
+ localns: dict[str, Any] | None = ...,
+ attribs: list[Attribute[Any]] | None = ...,
+ include_extras: bool = ...,
+) -> _A: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: list[str] | tuple[str, ...] | dict[str, Any],
+ bases: tuple[type, ...] = ...,
+ class_body: dict[str, Any] | None = ...,
+ repr_ns: str | None = ...,
+ repr: bool = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ collect_by_mro: bool = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
+def asdict(
+ inst: AttrsInstance,
+ recurse: bool = ...,
+ filter: _FilterType[Any] | None = ...,
+ dict_factory: type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
+ tuple_keys: bool | None = ...,
+) -> dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: AttrsInstance,
+ recurse: bool = ...,
+ filter: _FilterType[Any] | None = ...,
+ tuple_factory: type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> tuple[Any, ...]: ...
+def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_cmp.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_cmp.py"
new file mode 100644
index 0000000..09bab49
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_cmp.py"
@@ -0,0 +1,160 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import __ne__
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+ eq=None,
+ lt=None,
+ le=None,
+ gt=None,
+ ge=None,
+ require_same_type=True,
+ class_name="Comparable",
+):
+ """
+ Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
+ and ``cmp`` arguments to customize field comparison.
+
+ The resulting class will have a full set of ordering methods if at least
+ one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
+
+ Args:
+ eq (typing.Callable | None):
+ Callable used to evaluate equality of two objects.
+
+ lt (typing.Callable | None):
+ Callable used to evaluate whether one object is less than another
+ object.
+
+ le (typing.Callable | None):
+ Callable used to evaluate whether one object is less than or equal
+ to another object.
+
+ gt (typing.Callable | None):
+ Callable used to evaluate whether one object is greater than
+ another object.
+
+ ge (typing.Callable | None):
+ Callable used to evaluate whether one object is greater than or
+ equal to another object.
+
+ require_same_type (bool):
+ When `True`, equality and ordering methods will return
+ `NotImplemented` if objects are not of the same type.
+
+ class_name (str | None): Name of class. Defaults to "Comparable".
+
+ See `comparison` for more details.
+
+ .. versionadded:: 21.1.0
+ """
+
+ body = {
+ "__slots__": ["value"],
+ "__init__": _make_init(),
+ "_requirements": [],
+ "_is_comparable_to": _is_comparable_to,
+ }
+
+ # Add operations.
+ num_order_functions = 0
+ has_eq_function = False
+
+ if eq is not None:
+ has_eq_function = True
+ body["__eq__"] = _make_operator("eq", eq)
+ body["__ne__"] = __ne__
+
+ if lt is not None:
+ num_order_functions += 1
+ body["__lt__"] = _make_operator("lt", lt)
+
+ if le is not None:
+ num_order_functions += 1
+ body["__le__"] = _make_operator("le", le)
+
+ if gt is not None:
+ num_order_functions += 1
+ body["__gt__"] = _make_operator("gt", gt)
+
+ if ge is not None:
+ num_order_functions += 1
+ body["__ge__"] = _make_operator("ge", ge)
+
+ type_ = types.new_class(
+ class_name, (object,), {}, lambda ns: ns.update(body)
+ )
+
+ # Add same type requirement.
+ if require_same_type:
+ type_._requirements.append(_check_same_type)
+
+ # Add total ordering if at least one operation was defined.
+ if 0 < num_order_functions < 4:
+ if not has_eq_function:
+ # functools.total_ordering requires __eq__ to be defined,
+ # so raise early error here to keep a nice stack.
+ msg = "eq must be define is order to complete ordering from lt, le, gt, ge."
+ raise ValueError(msg)
+ type_ = functools.total_ordering(type_)
+
+ return type_
+
+
+def _make_init():
+ """
+ Create __init__ method.
+ """
+
+ def __init__(self, value):
+ """
+ Initialize object with *value*.
+ """
+ self.value = value
+
+ return __init__
+
+
+def _make_operator(name, func):
+ """
+ Create operator method.
+ """
+
+ def method(self, other):
+ if not self._is_comparable_to(other):
+ return NotImplemented
+
+ result = func(self.value, other.value)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return result
+
+ method.__name__ = f"__{name}__"
+ method.__doc__ = (
+ f"Return a {_operation_names[name]} b. Computed by attrs."
+ )
+
+ return method
+
+
+def _is_comparable_to(self, other):
+ """
+ Check whether `other` is comparable to `self`.
+ """
+ return all(func(self, other) for func in self._requirements)
+
+
+def _check_same_type(self, other):
+ """
+ Return True if *self* and *other* are of the same type, False otherwise.
+ """
+ return other.value.__class__ is self.value.__class__
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_cmp.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_cmp.pyi"
new file mode 100644
index 0000000..cc7893b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_cmp.pyi"
@@ -0,0 +1,13 @@
+from typing import Any, Callable
+
+_CompareWithType = Callable[[Any, Any], bool]
+
+def cmp_using(
+ eq: _CompareWithType | None = ...,
+ lt: _CompareWithType | None = ...,
+ le: _CompareWithType | None = ...,
+ gt: _CompareWithType | None = ...,
+ ge: _CompareWithType | None = ...,
+ require_same_type: bool = ...,
+ class_name: str = ...,
+) -> type: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_compat.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_compat.py"
new file mode 100644
index 0000000..bc68ed9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_compat.py"
@@ -0,0 +1,99 @@
+# SPDX-License-Identifier: MIT
+
+import inspect
+import platform
+import sys
+import threading
+
+from collections.abc import Mapping, Sequence # noqa: F401
+from typing import _GenericAlias
+
+
+PYPY = platform.python_implementation() == "PyPy"
+PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
+PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
+PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
+PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
+PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
+
+
+if PY_3_14_PLUS:
+ import annotationlib
+
+ # We request forward-ref annotations to not break in the presence of
+ # forward references.
+
+ def _get_annotations(cls):
+ return annotationlib.get_annotations(
+ cls, format=annotationlib.Format.FORWARDREF
+ )
+
+else:
+
+ def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ return cls.__dict__.get("__annotations__", {})
+
+
+class _AnnotationExtractor:
+ """
+ Extract type annotations from a callable, returning None whenever there
+ is none.
+ """
+
+ __slots__ = ["sig"]
+
+ def __init__(self, callable):
+ try:
+ self.sig = inspect.signature(callable)
+ except (ValueError, TypeError): # inspect failed
+ self.sig = None
+
+ def get_first_param_type(self):
+ """
+ Return the type annotation of the first argument if it's not empty.
+ """
+ if not self.sig:
+ return None
+
+ params = list(self.sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ return params[0].annotation
+
+ return None
+
+ def get_return_type(self):
+ """
+ Return the return type if it's not empty.
+ """
+ if (
+ self.sig
+ and self.sig.return_annotation is not inspect.Signature.empty
+ ):
+ return self.sig.return_annotation
+
+ return None
+
+
+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
+repr_context = threading.local()
+
+
+def get_generic_base(cl):
+ """If this is a generic class (A[str]), return the generic base for it."""
+ if cl.__class__ is _GenericAlias:
+ return cl.__origin__
+ return None
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_config.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_config.py"
new file mode 100644
index 0000000..4b25772
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_config.py"
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: MIT
+
+__all__ = ["get_run_validators", "set_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+ """
+ Set whether or not validators are run. By default, they are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
+ instead.
+ """
+ if not isinstance(run, bool):
+ msg = "'run' must be bool."
+ raise TypeError(msg)
+ global _run_validators
+ _run_validators = run
+
+
+def get_run_validators():
+ """
+ Return whether or not validators are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
+ instead.
+ """
+ return _run_validators
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_funcs.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_funcs.py"
new file mode 100644
index 0000000..1adb500
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_funcs.py"
@@ -0,0 +1,497 @@
+# SPDX-License-Identifier: MIT
+
+
+import copy
+
+from ._compat import get_generic_base
+from ._make import _OBJ_SETATTR, NOTHING, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+_ATOMIC_TYPES = frozenset(
+ {
+ type(None),
+ bool,
+ int,
+ float,
+ str,
+ complex,
+ bytes,
+ type(...),
+ type,
+ range,
+ property,
+ }
+)
+
+
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+ value_serializer=None,
+):
+ """
+ Return the *attrs* attribute values of *inst* as a dict.
+
+ Optionally recurse into other *attrs*-decorated classes.
+
+ Args:
+ inst: Instance of an *attrs*-decorated class.
+
+ recurse (bool): Recurse into classes that are also *attrs*-decorated.
+
+ filter (~typing.Callable):
+ A callable whose return code determines whether an attribute or
+ element is included (`True`) or dropped (`False`). Is called with
+ the `attrs.Attribute` as the first argument and the value as the
+ second argument.
+
+ dict_factory (~typing.Callable):
+ A callable to produce dictionaries from. For example, to produce
+ ordered dictionaries instead of normal Python dictionaries, pass in
+ ``collections.OrderedDict``.
+
+ retain_collection_types (bool):
+ Do not convert to `list` when encountering an attribute whose type
+ is `tuple` or `set`. Only meaningful if *recurse* is `True`.
+
+ value_serializer (typing.Callable | None):
+ A hook that is called for every attribute or dict key/value. It
+ receives the current instance, field and value and must return the
+ (updated) value. The hook is run *after* the optional *filter* has
+ been applied.
+
+ Returns:
+ Return type of *dict_factory*.
+
+ Raises:
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ .. versionadded:: 16.0.0 *dict_factory*
+ .. versionadded:: 16.1.0 *retain_collection_types*
+ .. versionadded:: 20.3.0 *value_serializer*
+ .. versionadded:: 21.3.0
+ If a dict has a collection for a key, it is serialized as a tuple.
+ """
+ attrs = fields(inst.__class__)
+ rv = dict_factory()
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+
+ if value_serializer is not None:
+ v = value_serializer(inst, a, v)
+
+ if recurse is True:
+ value_type = type(v)
+ if value_type in _ATOMIC_TYPES:
+ rv[a.name] = v
+ elif has(value_type):
+ rv[a.name] = asdict(
+ v,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif issubclass(value_type, (tuple, list, set, frozenset)):
+ cf = value_type if retain_collection_types is True else list
+ items = [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in v
+ ]
+ try:
+ rv[a.name] = cf(items)
+ except TypeError:
+ if not issubclass(cf, tuple):
+ raise
+ # Workaround for TypeError: cf.__new__() missing 1 required
+ # positional argument (which appears, for a namedtuple)
+ rv[a.name] = cf(*items)
+ elif issubclass(value_type, dict):
+ df = dict_factory
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in v.items()
+ )
+ else:
+ rv[a.name] = v
+ else:
+ rv[a.name] = v
+ return rv
+
+
+def _asdict_anything(
+ val,
+ is_key,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+):
+ """
+ ``asdict`` only works on attrs instances, this works on anything.
+ """
+ val_type = type(val)
+ if val_type in _ATOMIC_TYPES:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+ elif getattr(val_type, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(
+ val,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif issubclass(val_type, (tuple, list, set, frozenset)):
+ if retain_collection_types is True:
+ cf = val.__class__
+ elif is_key:
+ cf = tuple
+ else:
+ cf = list
+
+ rv = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in val
+ ]
+ )
+ elif issubclass(val_type, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in val.items()
+ )
+ else:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
+ Return the *attrs* attribute values of *inst* as a tuple.
+
+ Optionally recurse into other *attrs*-decorated classes.
+
+ Args:
+ inst: Instance of an *attrs*-decorated class.
+
+ recurse (bool):
+ Recurse into classes that are also *attrs*-decorated.
+
+ filter (~typing.Callable):
+ A callable whose return code determines whether an attribute or
+ element is included (`True`) or dropped (`False`). Is called with
+ the `attrs.Attribute` as the first argument and the value as the
+ second argument.
+
+ tuple_factory (~typing.Callable):
+ A callable to produce tuples from. For example, to produce lists
+ instead of tuples.
+
+ retain_collection_types (bool):
+ Do not convert to `list` or `dict` when encountering an attribute
+ whose type is `tuple`, `dict` or `set`. Only meaningful if
+ *recurse* is `True`.
+
+ Returns:
+ Return type of *tuple_factory*
+
+ Raises:
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ .. versionadded:: 16.2.0
+ """
+ attrs = fields(inst.__class__)
+ rv = []
+ retain = retain_collection_types # Very long. :/
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ value_type = type(v)
+ if recurse is True:
+ if value_type in _ATOMIC_TYPES:
+ rv.append(v)
+ elif has(value_type):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif issubclass(value_type, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain is True else list
+ items = [
+ (
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ )
+ for j in v
+ ]
+ try:
+ rv.append(cf(items))
+ except TypeError:
+ if not issubclass(cf, tuple):
+ raise
+ # Workaround for TypeError: cf.__new__() missing 1 required
+ # positional argument (which appears, for a namedtuple)
+ rv.append(cf(*items))
+ elif issubclass(value_type, dict):
+ df = value_type if retain is True else dict
+ rv.append(
+ df(
+ (
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk
+ ),
+ (
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv
+ ),
+ )
+ for kk, vv in v.items()
+ )
+ )
+ else:
+ rv.append(v)
+ else:
+ rv.append(v)
+
+ return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+ """
+ Check whether *cls* is a class with *attrs* attributes.
+
+ Args:
+ cls (type): Class to introspect.
+
+ Raises:
+ TypeError: If *cls* is not a class.
+
+ Returns:
+ bool:
+ """
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is not None:
+ return True
+
+ # No attrs, maybe it's a specialized generic (A[str])?
+ generic_base = get_generic_base(cls)
+ if generic_base is not None:
+ generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
+ if generic_attrs is not None:
+ # Stick it on here for speed next time.
+ cls.__attrs_attrs__ = generic_attrs
+ return generic_attrs is not None
+ return False
+
+
+def assoc(inst, **changes):
+ """
+ Copy *inst* and apply *changes*.
+
+ This is different from `evolve` that applies the changes to the arguments
+ that create the new instance.
+
+ `evolve`'s behavior is preferable, but there are `edge cases`_ where it
+ doesn't work. Therefore `assoc` is deprecated, but will not be removed.
+
+ .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251
+
+ Args:
+ inst: Instance of a class with *attrs* attributes.
+
+ changes: Keyword changes in the new copy.
+
+ Returns:
+ A copy of inst with *changes* incorporated.
+
+ Raises:
+ attrs.exceptions.AttrsAttributeNotFoundError:
+ If *attr_name* couldn't be found on *cls*.
+
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ .. deprecated:: 17.1.0
+ Use `attrs.evolve` instead if you can. This function will not be
+ removed due to the slightly different approach compared to
+ `attrs.evolve`, though.
+ """
+ new = copy.copy(inst)
+ attrs = fields(inst.__class__)
+ for k, v in changes.items():
+ a = getattr(attrs, k, NOTHING)
+ if a is NOTHING:
+ msg = f"{k} is not an attrs attribute on {new.__class__}."
+ raise AttrsAttributeNotFoundError(msg)
+ _OBJ_SETATTR(new, k, v)
+ return new
+
+
+def resolve_types(
+ cls, globalns=None, localns=None, attribs=None, include_extras=True
+):
+ """
+ Resolve any strings and forward annotations in type annotations.
+
+ This is only required if you need concrete types in :class:`Attribute`'s
+ *type* field. In other words, you don't need to resolve your types if you
+ only use them for static type checking.
+
+ With no arguments, names will be looked up in the module in which the class
+ was created. If this is not what you want, for example, if the name only
+ exists inside a method, you may pass *globalns* or *localns* to specify
+ other dictionaries in which to look up these names. See the docs of
+ `typing.get_type_hints` for more details.
+
+ Args:
+ cls (type): Class to resolve.
+
+ globalns (dict | None): Dictionary containing global variables.
+
+ localns (dict | None): Dictionary containing local variables.
+
+ attribs (list | None):
+ List of attribs for the given class. This is necessary when calling
+ from inside a ``field_transformer`` since *cls* is not an *attrs*
+ class yet.
+
+ include_extras (bool):
+ Resolve more accurately, if possible. Pass ``include_extras`` to
+ ``typing.get_type_hints``, if supported by the typing module. On
+ supported Python versions (3.9+), this resolves the types more
+ accurately.
+
+ Raises:
+ TypeError: If *cls* is not a class.
+
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class and you didn't pass any attribs.
+
+ NameError: If types cannot be resolved because of missing variables.
+
+ Returns:
+ *cls* so you can use this function also as a class decorator. Please
+ note that you have to apply it **after** `attrs.define`. That means the
+ decorator has to come in the line **before** `attrs.define`.
+
+ .. versionadded:: 20.1.0
+ .. versionadded:: 21.1.0 *attribs*
+ .. versionadded:: 23.1.0 *include_extras*
+ """
+ # Since calling get_type_hints is expensive we cache whether we've
+ # done it already.
+ if getattr(cls, "__attrs_types_resolved__", None) != cls:
+ import typing
+
+ kwargs = {
+ "globalns": globalns,
+ "localns": localns,
+ "include_extras": include_extras,
+ }
+
+ hints = typing.get_type_hints(cls, **kwargs)
+ for field in fields(cls) if attribs is None else attribs:
+ if field.name in hints:
+ # Since fields have been frozen we must work around it.
+ _OBJ_SETATTR(field, "type", hints[field.name])
+ # We store the class we resolved so that subclasses know they haven't
+ # been resolved.
+ cls.__attrs_types_resolved__ = cls
+
+ # Return the class so you can use it as a decorator too.
+ return cls
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_make.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_make.py"
new file mode 100644
index 0000000..d24d9ba
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_make.py"
@@ -0,0 +1,3362 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+import abc
+import contextlib
+import copy
+import enum
+import inspect
+import itertools
+import linecache
+import sys
+import types
+import unicodedata
+import weakref
+
+from collections.abc import Callable, Mapping
+from functools import cached_property
+from typing import Any, NamedTuple, TypeVar
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+ PY_3_10_PLUS,
+ PY_3_11_PLUS,
+ PY_3_13_PLUS,
+ _AnnotationExtractor,
+ _get_annotations,
+ get_generic_base,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ UnannotatedAttributeError,
+)
+
+
+# This is used at least twice, so cache it here.
+_OBJ_SETATTR = object.__setattr__
+_INIT_FACTORY_PAT = "__attr_factory_%s"
+_CLASSVAR_PREFIXES = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_HASH_CACHE_FIELD = "_attrs_cached_hash"
+
+_EMPTY_METADATA_SINGLETON = types.MappingProxyType({})
+
+# Unique object for unequivocal getattr() defaults.
+_SENTINEL = object()
+
+_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing(enum.Enum):
+ """
+ Sentinel to indicate the lack of a value when `None` is ambiguous.
+
+ If extending attrs, you can use ``typing.Literal[NOTHING]`` to show
+ that a value may be ``NOTHING``.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+ .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant.
+ """
+
+ NOTHING = enum.auto()
+
+ def __repr__(self):
+ return "NOTHING"
+
+ def __bool__(self):
+ return False
+
+
+NOTHING = _Nothing.NOTHING
+"""
+Sentinel to indicate the lack of a value when `None` is ambiguous.
+
+When using in 3rd party code, use `attrs.NothingType` for type annotations.
+"""
+
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since `None`
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008
+ return _none_constructor, _args
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=None,
+ eq=None,
+ order=None,
+ on_setattr=None,
+ alias=None,
+):
+ """
+ Create a new field / attribute on a class.
+
+ Identical to `attrs.field`, except it's not keyword-only.
+
+ Consider using `attrs.field` in new code (``attr.ib`` will *never* go away,
+ though).
+
+ .. warning::
+
+ Does **nothing** unless the class is also decorated with
+ `attr.s` (or similar)!
+
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is `None` and therefore mirrors *eq* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0
+ *converter* as a replacement for the deprecated *convert* to achieve
+ consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ .. versionadded:: 22.2.0 *alias*
+ .. versionchanged:: 25.4.0
+ *kw_only* can now be None, and its default is also changed from False to
+ None.
+ """
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq, order, True
+ )
+
+ if hash is not None and hash is not True and hash is not False:
+ msg = "Invalid value for hash. Must be True, False, or None."
+ raise TypeError(msg)
+
+ if factory is not None:
+ if default is not NOTHING:
+ msg = (
+ "The `default` and `factory` arguments are mutually exclusive."
+ )
+ raise ValueError(msg)
+ if not callable(factory):
+ msg = "The `factory` argument must be a callable."
+ raise ValueError(msg)
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ # Apply syntactic sugar by auto-wrapping.
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ if validator and isinstance(validator, (list, tuple)):
+ validator = and_(*validator)
+
+ if converter and isinstance(converter, (list, tuple)):
+ converter = pipe(*converter)
+
+ return _CountingAttr(
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ hash=hash,
+ init=init,
+ converter=converter,
+ metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ eq=eq,
+ eq_key=eq_key,
+ order=order,
+ order_key=order_key,
+ on_setattr=on_setattr,
+ alias=alias,
+ )
+
+
+def _compile_and_eval(
+ script: str,
+ globs: dict[str, Any] | None,
+ locs: Mapping[str, object] | None = None,
+ filename: str = "",
+) -> None:
+ """
+ Evaluate the script with the given global (globs) and local (locs)
+ variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
+def _linecache_and_compile(
+ script: str,
+ filename: str,
+ globs: dict[str, Any] | None,
+ locals: Mapping[str, object] | None = None,
+) -> dict[str, Any]:
+ """
+ Cache the script with _linecache_, compile it and return the _locals_.
+ """
+
+ locs = {} if locals is None else locals
+
+ # In order for debuggers like PDB to be able to step through the code,
+ # we add a fake linecache entry.
+ count = 1
+ base_filename = filename
+ while True:
+ linecache_tuple = (
+ len(script),
+ None,
+ script.splitlines(True),
+ filename,
+ )
+ old_val = linecache.cache.setdefault(filename, linecache_tuple)
+ if old_val == linecache_tuple:
+ break
+
+ filename = f"{base_filename[:-1]}-{count}>"
+ count += 1
+
+ _compile_and_eval(script, globs, locs, filename)
+
+ return locs
+
+
+def _make_attr_tuple_class(cls_name: str, attr_names: list[str]) -> type:
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = f"{cls_name}Attributes"
+ body = {}
+ for i, attr_name in enumerate(attr_names):
+
+ def getter(self, i=i):
+ return self[i]
+
+ body[attr_name] = property(getter)
+ return type(attr_class_name, (tuple,), body)
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+class _Attributes(NamedTuple):
+ attrs: type
+ base_attrs: list[Attribute]
+ base_attrs_map: dict[str, type]
+
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ annot = str(annot)
+
+ # Annotation can be quoted.
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+ annot = annot[1:-1]
+
+ return annot.startswith(_CLASSVAR_PREFIXES)
+
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+ """
+ return attrib_name in cls.__dict__
+
+
+def _collect_base_attrs(
+ cls, taken_attr_names
+) -> tuple[list[Attribute], dict[str, type]]:
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in reversed(cls.__mro__[1:-1]):
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.inherited or a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True) # noqa: PLW2901
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ # For each name, only keep the freshest definition i.e. the furthest at the
+ # back. base_attr_map is fine because it gets overwritten with every new
+ # instance.
+ filtered = []
+ seen = set()
+ for a in reversed(base_attrs):
+ if a.name in seen:
+ continue
+ filtered.insert(0, a)
+ seen.add(a.name)
+
+ return filtered, base_attr_map
+
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+ N.B. *taken_attr_names* will be mutated.
+
+ Adhere to the old incorrect behavior.
+
+ Notably it collects from the front and considers inherited attributes which
+ leads to the buggy behavior reported in #428.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True) # noqa: PLW2901
+ taken_attr_names.add(a.name)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ return base_attrs, base_attr_map
+
+
def _transform_attrs(
    cls,
    these,
    auto_attribs,
    kw_only,
    collect_by_mro,
    field_transformer,
) -> _Attributes:
    """
    Transform all `_CountingAttr`s on a class into `Attribute`s.

    If *these* is passed, use that and don't look for them on the class.

    If *collect_by_mro* is True, collect them in the correct MRO order,
    otherwise use the old -- incorrect -- order. See #428.

    Return an `_Attributes`.
    """
    cd = cls.__dict__
    anns = _get_annotations(cls)

    if these is not None:
        # An explicit attribute mapping was supplied -- trust it and its
        # order, and don't inspect the class body at all.
        ca_list = list(these.items())
    elif auto_attribs is True:
        # auto_attribs: every annotated class attribute (except ClassVars)
        # becomes a field; an un-annotated bare `attr.ib()` is an error.
        ca_names = {
            name
            for name, attr in cd.items()
            if attr.__class__ is _CountingAttr
        }
        ca_list = []
        annot_names = set()
        for attr_name, type in anns.items():  # shadows builtin `type`
            if _is_class_var(type):
                continue
            annot_names.add(attr_name)
            a = cd.get(attr_name, NOTHING)

            # A plain value serves as the default and is wrapped into a
            # full attrib().
            if a.__class__ is not _CountingAttr:
                a = attrib(a)
            ca_list.append((attr_name, a))

        unannotated = ca_names - annot_names
        if unannotated:
            raise UnannotatedAttributeError(
                "The following `attr.ib`s lack a type annotation: "
                + ", ".join(
                    sorted(unannotated, key=lambda n: cd.get(n).counter)
                )
                + "."
            )
    else:
        # Classic mode: field order is the creation order of the
        # `attr.ib()` objects, tracked by their counter.
        ca_list = sorted(
            (
                (name, attr)
                for name, attr in cd.items()
                if attr.__class__ is _CountingAttr
            ),
            key=lambda e: e[1].counter,
        )

    fca = Attribute.from_counting_attr
    no = ClassProps.KeywordOnly.NO
    own_attrs = [
        fca(
            attr_name,
            ca,
            kw_only is not no,
            anns.get(attr_name),
        )
        for attr_name, ca in ca_list
    ]

    if collect_by_mro:
        base_attrs, base_attr_map = _collect_base_attrs(
            cls, {a.name for a in own_attrs}
        )
    else:
        base_attrs, base_attr_map = _collect_base_attrs_broken(
            cls, {a.name for a in own_attrs}
        )

    if kw_only is ClassProps.KeywordOnly.FORCE:
        # FORCE also applies keyword-only-ness to inherited attributes.
        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]

    attrs = base_attrs + own_attrs

    if field_transformer is not None:
        attrs = tuple(field_transformer(cls, attrs))

    # Check attr order after executing the field_transformer.
    # Mandatory vs non-mandatory attr order only matters when they are part of
    # the __init__ signature and when they aren't kw_only (which are moved to
    # the end and can be mandatory or non-mandatory in any order, as they will
    # be specified as keyword args anyway). Check the order of those attrs:
    had_default = False
    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
        if had_default is True and a.default is NOTHING:
            msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}"
            raise ValueError(msg)

        if had_default is False and a.default is not NOTHING:
            had_default = True

    # Resolve default field alias after executing field_transformer.
    # This allows field_transformer to differentiate between explicit vs
    # default aliases and supply their own defaults.
    for a in attrs:
        if not a.alias:
            # Evolve is very slow, so we hold our nose and do it dirty.
            _OBJ_SETATTR.__get__(a)("alias", _default_init_alias_for(a.name))

    # Create AttrsClass *after* applying the field_transformer since it may
    # add or remove attributes!
    attr_names = [a.name for a in attrs]
    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)

    return _Attributes(AttrsClass(attrs), base_attrs, base_attr_map)
+
+
def _make_cached_property_getattr(cached_properties, original_getattr, cls):
    """
    Build a ``__getattr__`` implementation (as compiled source) that lazily
    computes and caches the given *cached_properties* on first access,
    delegating all other lookups to *original_getattr* or ``super()``.
    """
    # NOTE(review): the indentation inside these generated-source string
    # literals appears whitespace-mangled in this copy; restored to what the
    # generated function requires -- confirm against upstream attrs.
    lines = [
        # Wrapped to get `__class__` into closure cell for super()
        # (It will be replaced with the newly constructed class after construction).
        "def wrapper(_cls):",
        "    __class__ = _cls",
        "    def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):",
        "         func = cached_properties.get(item)",
        "         if func is not None:",
        "             result = func(self)",
        "             _setter = _cached_setattr_get(self)",
        "             _setter(item, result)",
        "             return result",
    ]
    if original_getattr is not None:
        lines.append(
            "         return original_getattr(self, item)",
        )
    else:
        # NOTE(review): the last two generated lines sit after unconditional
        # returns/raises and are unreachable in the generated function.
        lines.extend(
            [
                "         try:",
                "             return super().__getattribute__(item)",
                "         except AttributeError:",
                "             if not hasattr(super(), '__getattr__'):",
                "                 raise",
                "             return super().__getattr__(item)",
                "         original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"",
                "         raise AttributeError(original_error)",
            ]
        )

    lines.extend(
        [
            "    return __getattr__",
            "__getattr__ = wrapper(_cls)",
        ]
    )

    unique_filename = _generate_unique_filename(cls, "getattr")

    # Globals the generated source needs at run time.
    glob = {
        "cached_properties": cached_properties,
        "_cached_setattr_get": _OBJ_SETATTR.__get__,
        "original_getattr": original_getattr,
    }

    return _linecache_and_compile(
        "\n".join(lines), unique_filename, glob, locals={"_cls": cls}
    )["__getattr__"]
+
+
+def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ "__traceback__",
+ "__suppress_context__",
+ "__notes__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ if isinstance(self, BaseException) and name in ("__notes__",):
+ BaseException.__delattr__(self, name)
+ return
+
+ raise FrozenInstanceError
+
+
def evolve(*args, **changes):
    """
    Create a new instance, based on the first positional argument with
    *changes* applied.

    .. tip::

        On Python 3.13 and later, you can also use `copy.replace` instead.

    Args:

        inst:
            Instance of a class with *attrs* attributes. *inst* must be passed
            as a positional argument.

        changes:
            Keyword changes in the new copy.

    Returns:
        A copy of inst with *changes* incorporated.

    Raises:
        TypeError:
            If *attr_name* couldn't be found in the class ``__init__``.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 17.1.0
    .. deprecated:: 23.1.0
       It is now deprecated to pass the instance using the keyword argument
       *inst*. It will raise a warning until at least April 2024, after which
       it will become an error. Always pass the instance as a positional
       argument.
    .. versionchanged:: 24.1.0
       *inst* can't be passed as a keyword argument anymore.
    """
    if len(args) != 1:
        msg = (
            f"evolve() takes 1 positional argument, but {len(args)} were given"
        )
        raise TypeError(msg)

    inst = args[0]
    cls = inst.__class__

    for field in fields(cls):
        if not field.init:
            continue
        # Private attributes are exposed in __init__ under their alias.
        changes.setdefault(field.alias, getattr(inst, field.name))

    return cls(**changes)
+
+
class _ClassBuilder:
    """
    Iteratively build *one* class.
    """

    __slots__ = (
        "_add_method_dunders",
        "_attr_names",
        "_attrs",
        "_base_attr_map",
        "_base_names",
        "_cache_hash",
        "_cls",
        "_cls_dict",
        "_delete_attribs",
        "_frozen",
        "_has_custom_setattr",
        "_has_post_init",
        "_has_pre_init",
        "_is_exc",
        "_on_setattr",
        "_pre_init_has_args",
        "_repr_added",
        "_script_snippets",
        "_slots",
        "_weakref_slot",
        "_wrote_own_setattr",
    )

    def __init__(
        self,
        cls: type,
        these,
        auto_attribs: bool,
        props: ClassProps,
        has_custom_setattr: bool,
    ):
        # Resolve the full attribute list (own + inherited) up front.
        attrs, base_attrs, base_map = _transform_attrs(
            cls,
            these,
            auto_attribs,
            props.kw_only,
            props.collected_fields_by_mro,
            props.field_transformer,
        )

        self._cls = cls
        # Slotted classes get rebuilt from scratch, so start from a copy of
        # the original class dict; patched classes start empty.
        self._cls_dict = dict(cls.__dict__) if props.is_slotted else {}
        self._attrs = attrs
        self._base_names = {a.name for a in base_attrs}
        self._base_attr_map = base_map
        self._attr_names = tuple(a.name for a in attrs)
        self._slots = props.is_slotted
        self._frozen = props.is_frozen
        self._weakref_slot = props.has_weakref_slot
        self._cache_hash = (
            props.hashability is ClassProps.Hashability.HASHABLE_CACHED
        )
        self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
        self._pre_init_has_args = False
        if self._has_pre_init:
            # Check if the pre init method has more arguments than just `self`
            # We want to pass arguments if pre init expects arguments
            pre_init_func = cls.__attrs_pre_init__
            pre_init_signature = inspect.signature(pre_init_func)
            self._pre_init_has_args = len(pre_init_signature.parameters) > 1
        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
        self._delete_attribs = not bool(these)
        self._is_exc = props.is_exception
        self._on_setattr = props.on_setattr_hook

        self._has_custom_setattr = has_custom_setattr
        self._wrote_own_setattr = False

        self._cls_dict["__attrs_attrs__"] = self._attrs
        self._cls_dict["__attrs_props__"] = props

        if props.is_frozen:
            # Frozen classes get attribute mutation blocked wholesale.
            self._cls_dict["__setattr__"] = _frozen_setattrs
            self._cls_dict["__delattr__"] = _frozen_delattrs

            self._wrote_own_setattr = True
        elif self._on_setattr in (
            _DEFAULT_ON_SETATTR,
            setters.validate,
            setters.convert,
        ):
            has_validator = has_converter = False
            for a in attrs:
                if a.validator is not None:
                    has_validator = True
                if a.converter is not None:
                    has_converter = True

                if has_validator and has_converter:
                    break
            if (
                (
                    self._on_setattr == _DEFAULT_ON_SETATTR
                    and not (has_validator or has_converter)
                )
                or (self._on_setattr == setters.validate and not has_validator)
                or (self._on_setattr == setters.convert and not has_converter)
            ):
                # If class-level on_setattr is set to convert + validate, but
                # there's no field to convert or validate, pretend like there's
                # no on_setattr.
                self._on_setattr = None

        if props.added_pickling:
            (
                self._cls_dict["__getstate__"],
                self._cls_dict["__setstate__"],
            ) = self._make_getstate_setstate()

        # tuples of script, globs, hook
        self._script_snippets: list[
            tuple[str, dict, Callable[[dict, dict], Any]]
        ] = []
        self._repr_added = False

        # We want to only do this check once; in 99.9% of cases these
        # exist.
        if not hasattr(self._cls, "__module__") or not hasattr(
            self._cls, "__qualname__"
        ):
            self._add_method_dunders = self._add_method_dunders_safe
        else:
            self._add_method_dunders = self._add_method_dunders_unsafe

    def __repr__(self):
        return f"<_ClassBuilder(cls={self._cls.__name__})>"

    def _eval_snippets(self) -> None:
        """
        Evaluate any registered snippets in one go.
        """
        script = "\n".join([snippet[0] for snippet in self._script_snippets])
        globs = {}
        for _, snippet_globs, _ in self._script_snippets:
            globs.update(snippet_globs)

        locs = _linecache_and_compile(
            script,
            _generate_unique_filename(self._cls, "methods"),
            globs,
        )

        # Each hook moves its compiled method(s) into the class dict.
        for _, _, hook in self._script_snippets:
            hook(self._cls_dict, locs)

    def build_class(self):
        """
        Finalize class based on the accumulated configuration.

        Builder cannot be used after calling this method.
        """
        self._eval_snippets()
        if self._slots is True:
            cls = self._create_slots_class()
            self._cls.__attrs_base_of_slotted__ = weakref.ref(cls)
        else:
            cls = self._patch_original_class()
            if PY_3_10_PLUS:
                cls = abc.update_abstractmethods(cls)

        # The method gets only called if it's not inherited from a base class.
        # _has_own_attribute does NOT work properly for classmethods.
        if (
            getattr(cls, "__attrs_init_subclass__", None)
            and "__attrs_init_subclass__" not in cls.__dict__
        ):
            cls.__attrs_init_subclass__()

        return cls

    def _patch_original_class(self):
        """
        Apply accumulated methods and return the class.
        """
        cls = self._cls
        base_names = self._base_names

        # Clean class of attribute definitions (`attr.ib()`s).
        if self._delete_attribs:
            for name in self._attr_names:
                if (
                    name not in base_names
                    and getattr(cls, name, _SENTINEL) is not _SENTINEL
                ):
                    # An AttributeError can happen if a base class defines a
                    # class variable and we want to set an attribute with the
                    # same name by using only a type annotation.
                    with contextlib.suppress(AttributeError):
                        delattr(cls, name)

        # Attach our dunder methods.
        for name, value in self._cls_dict.items():
            setattr(cls, name, value)

        # If we've inherited an attrs __setattr__ and don't write our own,
        # reset it to object's.
        if not self._wrote_own_setattr and getattr(
            cls, "__attrs_own_setattr__", False
        ):
            cls.__attrs_own_setattr__ = False

            if not self._has_custom_setattr:
                cls.__setattr__ = _OBJ_SETATTR

        return cls

    def _create_slots_class(self):
        """
        Build and return a new class with a `__slots__` attribute.
        """
        # Exclude the attribute descriptors themselves; they will be
        # provided by the slots machinery.
        cd = {
            k: v
            for k, v in self._cls_dict.items()
            if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
        }

        # 3.14.0rc2+
        if hasattr(sys, "_clear_type_descriptors"):
            sys._clear_type_descriptors(self._cls)

        # If our class doesn't have its own implementation of __setattr__
        # (either from the user or by us), check the bases, if one of them has
        # an attrs-made __setattr__, that needs to be reset. We don't walk the
        # MRO because we only care about our immediate base classes.
        # XXX: This can be confused by subclassing a slotted attrs class with
        # XXX: a non-attrs class and subclass the resulting class with an attrs
        # XXX: class.  See `test_slotted_confused` for details.  For now that's
        # XXX: OK with us.
        if not self._wrote_own_setattr:
            cd["__attrs_own_setattr__"] = False

            if not self._has_custom_setattr:
                for base_cls in self._cls.__bases__:
                    if base_cls.__dict__.get("__attrs_own_setattr__", False):
                        cd["__setattr__"] = _OBJ_SETATTR
                        break

        # Traverse the MRO to collect existing slots
        # and check for an existing __weakref__.
        existing_slots = {}
        weakref_inherited = False
        for base_cls in self._cls.__mro__[1:-1]:
            if base_cls.__dict__.get("__weakref__", None) is not None:
                weakref_inherited = True
            existing_slots.update(
                {
                    name: getattr(base_cls, name)
                    for name in getattr(base_cls, "__slots__", [])
                }
            )

        base_names = set(self._base_names)

        names = self._attr_names
        if (
            self._weakref_slot
            and "__weakref__" not in getattr(self._cls, "__slots__", ())
            and "__weakref__" not in names
            and not weakref_inherited
        ):
            names += ("__weakref__",)

        cached_properties = {
            name: cached_prop.func
            for name, cached_prop in cd.items()
            if isinstance(cached_prop, cached_property)
        }

        # Collect methods with a `__class__` reference that are shadowed in the new class.
        # To know to update them.
        additional_closure_functions_to_update = []
        if cached_properties:
            class_annotations = _get_annotations(self._cls)
            for name, func in cached_properties.items():
                # Add cached properties to names for slotting.
                names += (name,)
                # Clear out function from class to avoid clashing.
                del cd[name]
                additional_closure_functions_to_update.append(func)
                annotation = inspect.signature(func).return_annotation
                if annotation is not inspect.Parameter.empty:
                    class_annotations[name] = annotation

            original_getattr = cd.get("__getattr__")
            if original_getattr is not None:
                additional_closure_functions_to_update.append(original_getattr)

            cd["__getattr__"] = _make_cached_property_getattr(
                cached_properties, original_getattr, self._cls
            )

        # We only add the names of attributes that aren't inherited.
        # Setting __slots__ to inherited attributes wastes memory.
        slot_names = [name for name in names if name not in base_names]

        # There are slots for attributes from current class
        # that are defined in parent classes.
        # As their descriptors may be overridden by a child class,
        # we collect them here and update the class dict
        reused_slots = {
            slot: slot_descriptor
            for slot, slot_descriptor in existing_slots.items()
            if slot in slot_names
        }
        slot_names = [name for name in slot_names if name not in reused_slots]
        cd.update(reused_slots)
        if self._cache_hash:
            slot_names.append(_HASH_CACHE_FIELD)

        cd["__slots__"] = tuple(slot_names)

        cd["__qualname__"] = self._cls.__qualname__

        # Create new class based on old class and our methods.
        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)

        # The following is a fix for
        # <https://github.com/python-attrs/attrs/issues/102>.
        # If a method mentions `__class__` or uses the no-arg super(), the
        # compiler will bake a reference to the class in the method itself
        # as `method.__closure__`.  Since we replace the class with a
        # clone, we rewrite these references so it keeps working.
        for item in itertools.chain(
            cls.__dict__.values(), additional_closure_functions_to_update
        ):
            if isinstance(item, (classmethod, staticmethod)):
                # Class- and staticmethods hide their functions inside.
                # These might need to be rewritten as well.
                closure_cells = getattr(item.__func__, "__closure__", None)
            elif isinstance(item, property):
                # Workaround for property `super()` shortcut (PY3-only).
                # There is no universal way for other descriptors.
                closure_cells = getattr(item.fget, "__closure__", None)
            else:
                closure_cells = getattr(item, "__closure__", None)

            if not closure_cells:  # Catch None or the empty list.
                continue
            for cell in closure_cells:
                try:
                    match = cell.cell_contents is self._cls
                except ValueError:  # noqa: PERF203
                    # ValueError: Cell is empty
                    pass
                else:
                    if match:
                        cell.cell_contents = cls
        return cls

    def add_repr(self, ns):
        """
        Register a generated ``__repr__`` (namespaced by *ns*) for the class.
        """
        script, globs = _make_repr_script(self._attrs, ns)

        def _attach_repr(cls_dict, globs):
            cls_dict["__repr__"] = self._add_method_dunders(globs["__repr__"])

        self._script_snippets.append((script, globs, _attach_repr))
        self._repr_added = True
        return self

    def add_str(self):
        """
        Add a ``__str__`` that simply delegates to ``__repr__``.
        """
        if not self._repr_added:
            msg = "__str__ can only be generated if a __repr__ exists."
            raise ValueError(msg)

        def __str__(self):
            return self.__repr__()

        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
        return self

    def _make_getstate_setstate(self):
        """
        Create custom __setstate__ and __getstate__ methods.
        """
        # __weakref__ is not writable.
        state_attr_names = tuple(
            an for an in self._attr_names if an != "__weakref__"
        )

        def slots_getstate(self):
            """
            Automatically created by attrs.
            """
            return {name: getattr(self, name) for name in state_attr_names}

        hash_caching_enabled = self._cache_hash

        def slots_setstate(self, state):
            """
            Automatically created by attrs.
            """
            __bound_setattr = _OBJ_SETATTR.__get__(self)
            if isinstance(state, tuple):
                # Backward compatibility with attrs instances pickled with
                # attrs versions before v22.2.0 which stored tuples.
                for name, value in zip(state_attr_names, state):
                    __bound_setattr(name, value)
            else:
                for name in state_attr_names:
                    if name in state:
                        __bound_setattr(name, state[name])

            # The hash code cache is not included when the object is
            # serialized, but it still needs to be initialized to None to
            # indicate that the first call to __hash__ should be a cache
            # miss.
            if hash_caching_enabled:
                __bound_setattr(_HASH_CACHE_FIELD, None)

        return slots_getstate, slots_setstate

    def make_unhashable(self):
        """
        Mark the class unhashable by setting ``__hash__`` to None.
        """
        self._cls_dict["__hash__"] = None
        return self

    def add_hash(self):
        """
        Register a generated ``__hash__`` for the class.
        """
        script, globs = _make_hash_script(
            self._cls,
            self._attrs,
            frozen=self._frozen,
            cache_hash=self._cache_hash,
        )

        def attach_hash(cls_dict: dict, locs: dict) -> None:
            cls_dict["__hash__"] = self._add_method_dunders(locs["__hash__"])

        self._script_snippets.append((script, globs, attach_hash))

        return self

    def add_init(self):
        """
        Register a generated ``__init__`` for the class.
        """
        script, globs, annotations = _make_init_script(
            self._cls,
            self._attrs,
            self._has_pre_init,
            self._pre_init_has_args,
            self._has_post_init,
            self._frozen,
            self._slots,
            self._cache_hash,
            self._base_attr_map,
            self._is_exc,
            self._on_setattr,
            attrs_init=False,
        )

        def _attach_init(cls_dict, globs):
            init = globs["__init__"]
            init.__annotations__ = annotations
            cls_dict["__init__"] = self._add_method_dunders(init)

        self._script_snippets.append((script, globs, _attach_init))

        return self

    def add_replace(self):
        """
        Add a PEP 8-style ``__replace__`` that delegates to `evolve`.
        """
        self._cls_dict["__replace__"] = self._add_method_dunders(
            lambda self, **changes: evolve(self, **changes)
        )
        return self

    def add_match_args(self):
        """
        Add ``__match_args__`` with all positional, init-able field names.
        """
        self._cls_dict["__match_args__"] = tuple(
            field.name
            for field in self._attrs
            if field.init and not field.kw_only
        )

    def add_attrs_init(self):
        """
        Register a generated ``__attrs_init__`` for the class.
        """
        script, globs, annotations = _make_init_script(
            self._cls,
            self._attrs,
            self._has_pre_init,
            self._pre_init_has_args,
            self._has_post_init,
            self._frozen,
            self._slots,
            self._cache_hash,
            self._base_attr_map,
            self._is_exc,
            self._on_setattr,
            attrs_init=True,
        )

        def _attach_attrs_init(cls_dict, globs):
            init = globs["__attrs_init__"]
            init.__annotations__ = annotations
            cls_dict["__attrs_init__"] = self._add_method_dunders(init)

        self._script_snippets.append((script, globs, _attach_attrs_init))

        return self

    def add_eq(self):
        """
        Register generated ``__eq__`` / ``__ne__`` for the class.
        """
        cd = self._cls_dict

        script, globs = _make_eq_script(self._attrs)

        def _attach_eq(cls_dict, globs):
            cls_dict["__eq__"] = self._add_method_dunders(globs["__eq__"])

        self._script_snippets.append((script, globs, _attach_eq))

        cd["__ne__"] = __ne__

        return self

    def add_order(self):
        """
        Add generated ordering methods (``__lt__`` etc.) to the class.
        """
        cd = self._cls_dict

        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
            self._add_method_dunders(meth)
            for meth in _make_order(self._cls, self._attrs)
        )

        return self

    def add_setattr(self):
        """
        Add a ``__setattr__`` that runs per-attribute on_setattr hooks.
        """
        sa_attrs = {}
        for a in self._attrs:
            on_setattr = a.on_setattr or self._on_setattr
            if on_setattr and on_setattr is not setters.NO_OP:
                sa_attrs[a.name] = a, on_setattr

        if not sa_attrs:
            return self

        if self._has_custom_setattr:
            # We need to write a __setattr__ but there already is one!
            msg = "Can't combine custom __setattr__ with on_setattr hooks."
            raise ValueError(msg)

        # docstring comes from _add_method_dunders
        def __setattr__(self, name, val):
            try:
                a, hook = sa_attrs[name]
            except KeyError:
                nval = val
            else:
                nval = hook(self, a, val)

            _OBJ_SETATTR(self, name, nval)

        self._cls_dict["__attrs_own_setattr__"] = True
        self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
        self._wrote_own_setattr = True

        return self

    def _add_method_dunders_unsafe(self, method: Callable) -> Callable:
        """
        Add __module__ and __qualname__ to a *method*.
        """
        method.__module__ = self._cls.__module__

        method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"

        method.__doc__ = (
            f"Method generated by attrs for class {self._cls.__qualname__}."
        )

        return method

    def _add_method_dunders_safe(self, method: Callable) -> Callable:
        """
        Add __module__ and __qualname__ to a *method* if possible.
        """
        with contextlib.suppress(AttributeError):
            method.__module__ = self._cls.__module__

        with contextlib.suppress(AttributeError):
            method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"

        with contextlib.suppress(AttributeError):
            method.__doc__ = f"Method generated by attrs for class {self._cls.__qualname__}."

        return method
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ msg = "Don't mix `cmp` with `eq' and `order`."
+ raise ValueError(msg)
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ msg = "`order` can only be True if `eq` is True too."
+ raise ValueError(msg)
+
+ return eq, order
+
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ msg = "Don't mix `cmp` with `eq' and `order`."
+ raise ValueError(msg)
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ msg = "`order` can only be True if `eq` is True too."
+ raise ValueError(msg)
+
+ return eq, eq_key, order, order_key
+
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+ *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
+ same as passed into @attr.s and *dunders* is a tuple of attribute names
+ whose presence signal that the user has implemented it themselves.
+
+ Return *default* if no reason for either for or against is found.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
+
def attrs(
    maybe_cls=None,
    these=None,
    repr_ns=None,
    repr=None,
    cmp=None,
    hash=None,
    init=None,
    slots=False,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=False,
    kw_only=False,
    cache_hash=False,
    auto_exc=False,
    eq=None,
    order=None,
    auto_detect=False,
    collect_by_mro=False,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
    unsafe_hash=None,
    force_kw_only=True,
):
    r"""
    A class decorator that adds :term:`dunder methods` according to the
    specified attributes using `attr.ib` or the *these* argument.

    Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will
    *never* go away, though).

    Args:
        repr_ns (str):
            When using nested classes, there was no way in Python 2 to
            automatically detect that. This argument allows to set a custom
            name for a more meaningful ``repr`` output. This argument is
            pointless in Python 3 and is therefore deprecated.

    .. caution::
        Refer to `attrs.define` for the rest of the parameters, but note that they
        can have different defaults.

        Notably, leaving *on_setattr* as `None` will **not** add any hooks.

    .. versionadded:: 16.0.0 *slots*
    .. versionadded:: 16.1.0 *frozen*
    .. versionadded:: 16.3.0 *str*
    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
    .. versionchanged:: 17.1.0
       *hash* supports `None` as value which is also the default now.
    .. versionadded:: 17.3.0 *auto_attribs*
    .. versionchanged:: 18.1.0
       If *these* is passed, no attributes are deleted from the class body.
    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
    .. versionadded:: 18.2.0 *weakref_slot*
    .. deprecated:: 18.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
       `DeprecationWarning` if the classes compared are subclasses of
       each other. ``__eq__`` and ``__ne__`` never tried to compared subclasses
       to each other.
    .. versionchanged:: 19.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
       subclasses comparable anymore.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionadded:: 18.2.0 *cache_hash*
    .. versionadded:: 19.1.0 *auto_exc*
    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
    .. versionadded:: 19.2.0 *eq* and *order*
    .. versionadded:: 20.1.0 *auto_detect*
    .. versionadded:: 20.1.0 *collect_by_mro*
    .. versionadded:: 20.1.0 *getstate_setstate*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionadded:: 20.3.0 *field_transformer*
    .. versionchanged:: 21.1.0
       ``init=False`` injects ``__attrs_init__``
    .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
    .. versionchanged:: 21.1.0 *cmp* undeprecated
    .. versionadded:: 21.3.0 *match_args*
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    .. deprecated:: 24.1.0 *repr_ns*
    .. versionchanged:: 24.1.0
       Instances are not compared as tuples of attributes anymore, but using a
       big ``and`` condition. This is faster and has more correct behavior for
       uncomparable values like `math.nan`.
    .. versionadded:: 24.1.0
       If a class has an *inherited* classmethod called
       ``__attrs_init_subclass__``, it is executed after the class is created.
    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
    .. versionchanged:: 25.4.0
       *kw_only* now only applies to attributes defined in the current class,
       and respects attribute-level ``kw_only=False`` settings.
    .. versionadded:: 25.4.0 *force_kw_only*
    """
    if repr_ns is not None:
        import warnings

        warnings.warn(
            DeprecationWarning(
                "The `repr_ns` argument is deprecated and will be removed in or after August 2025."
            ),
            stacklevel=2,
        )

    # Reconcile the legacy *cmp* flag with *eq* / *order*.
    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)

    # unsafe_hash takes precedence due to PEP 681.
    if unsafe_hash is not None:
        hash = unsafe_hash

    # A list/tuple of setattr hooks is composed into a single pipe.
    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    def wrap(cls):
        nonlocal hash
        is_frozen = frozen or _has_frozen_base_class(cls)
        is_exc = auto_exc is True and issubclass(cls, BaseException)
        has_own_setattr = auto_detect and _has_own_attribute(
            cls, "__setattr__"
        )

        if has_own_setattr and is_frozen:
            msg = "Can't freeze a class with a custom __setattr__."
            raise ValueError(msg)

        eq = not is_exc and _determine_whether_to_implement(
            cls, eq_, auto_detect, ("__eq__", "__ne__")
        )

        Hashability = ClassProps.Hashability

        # Translate the hash/eq/frozen combination into a Hashability value.
        if is_exc:
            hashability = Hashability.LEAVE_ALONE
        elif hash is True:
            hashability = (
                Hashability.HASHABLE_CACHED
                if cache_hash
                else Hashability.HASHABLE
            )
        elif hash is False:
            hashability = Hashability.LEAVE_ALONE
        elif hash is None:
            if auto_detect is True and _has_own_attribute(cls, "__hash__"):
                hashability = Hashability.LEAVE_ALONE
            elif eq is True and is_frozen is True:
                hashability = (
                    Hashability.HASHABLE_CACHED
                    if cache_hash
                    else Hashability.HASHABLE
                )
            elif eq is False:
                hashability = Hashability.LEAVE_ALONE
            else:
                hashability = Hashability.UNHASHABLE
        else:
            msg = "Invalid value for hash.  Must be True, False, or None."
            raise TypeError(msg)

        KeywordOnly = ClassProps.KeywordOnly
        if kw_only:
            kwo = KeywordOnly.FORCE if force_kw_only else KeywordOnly.YES
        else:
            kwo = KeywordOnly.NO

        props = ClassProps(
            is_exception=is_exc,
            is_frozen=is_frozen,
            is_slotted=slots,
            collected_fields_by_mro=collect_by_mro,
            added_init=_determine_whether_to_implement(
                cls, init, auto_detect, ("__init__",)
            ),
            added_repr=_determine_whether_to_implement(
                cls, repr, auto_detect, ("__repr__",)
            ),
            added_eq=eq,
            added_ordering=not is_exc
            and _determine_whether_to_implement(
                cls,
                order_,
                auto_detect,
                ("__lt__", "__le__", "__gt__", "__ge__"),
            ),
            hashability=hashability,
            added_match_args=match_args,
            kw_only=kwo,
            has_weakref_slot=weakref_slot,
            added_str=str,
            added_pickling=_determine_whether_to_implement(
                cls,
                getstate_setstate,
                auto_detect,
                ("__getstate__", "__setstate__"),
                default=slots,
            ),
            on_setattr_hook=on_setattr,
            field_transformer=field_transformer,
        )

        if not props.is_hashable and cache_hash:
            msg = "Invalid value for cache_hash.  To use hash caching, hashing must be either explicitly or implicitly enabled."
            raise TypeError(msg)

        builder = _ClassBuilder(
            cls,
            these,
            auto_attribs=auto_attribs,
            props=props,
            has_custom_setattr=has_own_setattr,
        )

        if props.added_repr:
            builder.add_repr(repr_ns)

        if props.added_str:
            builder.add_str()

        if props.added_eq:
            builder.add_eq()
        if props.added_ordering:
            builder.add_order()

        if not frozen:
            builder.add_setattr()

        if props.is_hashable:
            builder.add_hash()
        elif props.hashability is Hashability.UNHASHABLE:
            builder.make_unhashable()

        if props.added_init:
            builder.add_init()
        else:
            builder.add_attrs_init()
            if cache_hash:
                msg = "Invalid value for cache_hash.  To use hash caching, init must be True."
                raise TypeError(msg)

        if PY_3_13_PLUS and not _has_own_attribute(cls, "__replace__"):
            builder.add_replace()

        if (
            PY_3_10_PLUS
            and match_args
            and not _has_own_attribute(cls, "__match_args__")
        ):
            builder.add_match_args()

        return builder.build_class()

    # maybe_cls's type depends on the usage of the decorator.  It's a class
    # if it's used as `@attrs` but `None` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap

    return wrap(maybe_cls)
+
+
# Several functions below accept a parameter named *attrs*, which would
# shadow the decorator itself; this module-level alias keeps it reachable.
_attrs = attrs
"""
Internal alias so we can use it in functions that take an argument called
*attrs*.
"""
+
+
def _has_frozen_base_class(cls):
    """
    Return whether *cls* inherited a frozen ``__setattr__``.

    A frozen ancestor installs `_frozen_setattrs`, so an identity check on
    the class's effective ``__setattr__`` is sufficient.
    """
    effective_setattr = cls.__setattr__
    return effective_setattr is _frozen_setattrs
+
+
+def _generate_unique_filename(cls: type, func_name: str) -> str:
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ return (
+ f"<attrs generated {func_name} {cls.__module__}."
+ f"{getattr(cls, '__qualname__', cls.__name__)}>"
+ )
+
+
def _make_hash_script(
    cls: type, attrs: list[Attribute], frozen: bool, cache_hash: bool
) -> tuple[str, dict]:
    """
    Build the source of a ``__hash__`` method for *cls* plus the globals the
    generated code needs.

    Only fields that take part in hashing are used: ``hash=True``, or
    ``hash=None`` combined with ``eq=True``.  A per-class constant is mixed
    in so instances of different classes with equal field values hash
    differently.
    """
    attrs = tuple(
        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
    )

    tab = "        "

    # Per-class salt derived from the generated filename: stable for a given
    # module/qualname, different between classes.
    type_hash = hash(_generate_unique_filename(cls, "hash"))
    # If eq is custom generated, we need to include the functions in globs
    globs = {}

    hash_def = "def __hash__(self"
    hash_func = "hash(("
    closing_braces = "))"
    if not cache_hash:
        hash_def += "):"
    else:
        # Keyword-only _cache_wrapper maps a computed hash of None to another
        # value so "not yet cached" (None) stays distinguishable.
        hash_def += ", *"

        hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
        hash_func = "_cache_wrapper(" + hash_func
        closing_braces += ")"

    method_lines = [hash_def]

    def append_hash_computation_lines(prefix, indent):
        """
        Generate the code for actually computing the hash code.
        Below this will either be returned directly or used to compute
        a value which is then cached, depending on the value of cache_hash
        """

        method_lines.extend(
            [
                indent + prefix + hash_func,
                indent + f"        {type_hash},",
            ]
        )

        for a in attrs:
            if a.eq_key:
                # Custom comparison key: hash the transformed value and make
                # the key callable available to the generated code.
                cmp_name = f"_{a.name}_key"
                globs[cmp_name] = a.eq_key
                method_lines.append(
                    indent + f"        {cmp_name}(self.{a.name}),"
                )
            else:
                method_lines.append(indent + f"        self.{a.name},")

        method_lines.append(indent + "    " + closing_braces)

    if cache_hash:
        method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:")
        if frozen:
            # Frozen classes must bypass their own __setattr__ to store the
            # cached value.
            append_hash_computation_lines(
                f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2
            )
            method_lines.append(tab * 2 + ")")  # close __setattr__
        else:
            append_hash_computation_lines(
                f"self.{_HASH_CACHE_FIELD} = ", tab * 2
            )
        method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}")
    else:
        append_hash_computation_lines("return ", tab)

    script = "\n".join(method_lines)
    return script, globs
+
+
def _add_hash(cls: type, attrs: list[Attribute]):
    """
    Attach a generated, uncached ``__hash__`` to *cls* and return the class.
    """
    filename = _generate_unique_filename(cls, "__hash__")
    script, globs = _make_hash_script(
        cls, attrs, frozen=False, cache_hash=False
    )
    _compile_and_eval(script, globs, filename=filename)
    cls.__hash__ = globs["__hash__"]
    return cls
+
+
+def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+
+def _make_eq_script(attrs: list) -> tuple[str, dict]:
+ """
+ Create __eq__ method for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.eq]
+
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+
+ globs = {}
+ if attrs:
+ lines.append(" return (")
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = f"_{a.name}_key"
+ # Add the key function to the global namespace
+ # of the evaluated function.
+ globs[cmp_name] = a.eq_key
+ lines.append(
+ f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})"
+ )
+ else:
+ lines.append(f" self.{a.name} == other.{a.name}")
+ if a is not attrs[-1]:
+ lines[-1] = f"{lines[-1]} and"
+ lines.append(" )")
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+
+ return script, globs
+
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
def _add_eq(cls, attrs=None):
    """
    Attach generated ``__eq__``/``__ne__`` methods to *cls* and return it.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    source, namespace = _make_eq_script(attrs)
    _compile_and_eval(
        source,
        namespace,
        filename=_generate_unique_filename(cls, "__eq__"),
    )
    cls.__eq__ = namespace["__eq__"]
    # __ne__ is a shared plain function that derives inequality from __eq__.
    cls.__ne__ = __ne__

    return cls
+
+
def _make_repr_script(attrs, ns) -> tuple[str, dict]:
    """
    Create the source and globs for a __repr__ and return it.
    """
    # Figure out which attributes to include, and which function to use to
    # format them. The a.repr value can be either bool or a custom
    # callable.
    attr_names_with_reprs = tuple(
        (a.name, (repr if a.repr is True else a.repr), a.init)
        for a in attrs
        if a.repr is not False
    )
    # Custom repr callables are injected into the generated function's
    # globals as "<name>_repr".
    globs = {
        name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr
    }
    globs["_compat"] = _compat
    globs["AttributeError"] = AttributeError
    globs["NOTHING"] = NOTHING
    attribute_fragments = []
    for name, r, i in attr_names_with_reprs:
        # Fields excluded from __init__ may be unset on the instance; fall
        # back to NOTHING instead of raising AttributeError.
        accessor = (
            "self." + name if i else 'getattr(self, "' + name + '", NOTHING)'
        )
        fragment = (
            "%s={%s!r}" % (name, accessor)
            if r == repr
            else "%s={%s_repr(%s)}" % (name, name, accessor)
        )
        attribute_fragments.append(fragment)
    repr_fragment = ", ".join(attribute_fragments)

    if ns is None:
        cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
    else:
        cls_name_fragment = ns + ".{self.__class__.__name__}"

    # The thread-local "already_repring" set guards against infinite
    # recursion on cyclic structures by emitting "..." for re-entered
    # instances.
    lines = [
        "def __repr__(self):",
        "    try:",
        "        already_repring = _compat.repr_context.already_repring",
        "    except AttributeError:",
        "        already_repring = {id(self),}",
        "        _compat.repr_context.already_repring = already_repring",
        "    else:",
        "        if id(self) in already_repring:",
        "            return '...'",
        "        else:",
        "            already_repring.add(id(self))",
        "    try:",
        f"        return f'{cls_name_fragment}({repr_fragment})'",
        "    finally:",
        "        already_repring.remove(id(self))",
    ]

    return "\n".join(lines), globs
+
+
def _add_repr(cls, ns=None, attrs=None):
    """
    Attach a generated ``__repr__`` to *cls* and return the class.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    source, namespace = _make_repr_script(attrs, ns)
    filename = _generate_unique_filename(cls, "__repr__")
    _compile_and_eval(source, namespace, filename=filename)
    cls.__repr__ = namespace["__repr__"]
    return cls
+
+
def fields(cls):
    """
    Return the tuple of *attrs* attributes for a class.

    The tuple also allows accessing the fields by their names (see below for
    examples).

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        tuple (with name accessors) of `attrs.Attribute`

    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
       by name.
    .. versionchanged:: 23.1.0 Add support for generic classes.
    """
    generic_base = get_generic_base(cls)

    if generic_base is None and not isinstance(cls, type):
        msg = "Passed object must be a class."
        raise TypeError(msg)

    found = getattr(cls, "__attrs_attrs__", None)
    if found is not None:
        return found

    # Specialized generics (e.g. ``MyClass[int]``) don't carry the fields
    # themselves -- look them up on the unsubscripted origin class.
    if generic_base is not None:
        found = getattr(generic_base, "__attrs_attrs__", None)
        if found is not None:
            # Even though this is global state, stick it on here to speed
            # it up. We rely on `cls` being cached for this to be
            # efficient.
            cls.__attrs_attrs__ = found
            return found

    msg = f"{cls!r} is not an attrs-decorated class."
    raise NotAnAttrsClassError(msg)
+
+
def fields_dict(cls):
    """
    Return an ordered dictionary of *attrs* attributes for a class, whose keys
    are the attribute names.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        dict[str, attrs.Attribute]: Dict of attribute name to definition

    .. versionadded:: 18.1.0
    """
    if not isinstance(cls, type):
        msg = "Passed object must be a class."
        raise TypeError(msg)

    found = getattr(cls, "__attrs_attrs__", None)
    if found is None:
        msg = f"{cls!r} is not an attrs-decorated class."
        raise NotAnAttrsClassError(msg)

    return {attribute.name: attribute for attribute in found}
+
+
def validate(inst):
    """
    Validate all attributes on *inst* that have a validator.

    Leaves all exceptions through.

    Args:
        inst: Instance of a class with *attrs* attributes.
    """
    # Global kill switch: validators can be disabled process-wide.
    if _config._run_validators is False:
        return

    for attribute in fields(inst.__class__):
        validator = attribute.validator
        if validator is None:
            continue
        validator(inst, attribute, getattr(inst, attribute.name))
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ cls = base_attr_map.get(a_name)
+ return cls and "__slots__" in cls.__dict__
+
+
def _make_init_script(
    cls,
    attrs,
    pre_init,
    pre_init_has_args,
    post_init,
    frozen,
    slots,
    cache_hash,
    base_attr_map,
    is_exc,
    cls_on_setattr,
    attrs_init,
) -> tuple[str, dict, dict]:
    """
    Build the source of an ``__init__`` (or ``__attrs_init__``) method for
    *cls*, together with the globals and annotations the generated code
    needs.

    Raises:
        ValueError: If the class is frozen but an ``on_setattr`` hook is
            configured on the class or on one of its fields.
    """
    has_cls_on_setattr = (
        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
    )

    if frozen and has_cls_on_setattr:
        msg = "Frozen classes can't use on_setattr."
        raise ValueError(msg)

    # Frozen classes and hash caching both need the cached object.__setattr__
    # to write attributes past their own (blocked) __setattr__.
    needs_cached_setattr = cache_hash or frozen
    filtered_attrs = []
    attr_dict = {}
    for a in attrs:
        # Fields that neither appear in __init__ nor have a default require
        # no initialization code at all.
        if not a.init and a.default is NOTHING:
            continue

        filtered_attrs.append(a)
        attr_dict[a.name] = a

        if a.on_setattr is not None:
            if frozen is True:
                msg = "Frozen classes can't use on_setattr."
                raise ValueError(msg)

            needs_cached_setattr = True
        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
            needs_cached_setattr = True

    script, globs, annotations = _attrs_to_init_script(
        filtered_attrs,
        frozen,
        slots,
        pre_init,
        pre_init_has_args,
        post_init,
        cache_hash,
        base_attr_map,
        is_exc,
        needs_cached_setattr,
        has_cls_on_setattr,
        "__attrs_init__" if attrs_init else "__init__",
    )
    if cls.__module__ in sys.modules:
        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
        globs.update(sys.modules[cls.__module__].__dict__)

    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})

    if needs_cached_setattr:
        # Save the lookup overhead in __init__ if we need to circumvent
        # setattr hooks.
        globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__

    return script, globs, annotations
+
+
+def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str:
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return f"_setattr('{attr_name}', {value_var})"
+
+
+def _setattr_with_converter(
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
+ its converter first.
+ """
+ return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})"
+
+
+def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str:
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+ relegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return f"self.{attr_name} = {value}"
+
+
+def _assign_with_converter(
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
+ conversion. Otherwise relegate to _setattr_with_converter.
+ """
+ if has_on_setattr:
+ return _setattr_with_converter(attr_name, value_var, True, converter)
+
+ return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}"
+
+
def _determine_setters(
    frozen: bool, slots: bool, base_attr_map: dict[str, type]
):
    """
    Pick the statement-formatting helpers that ``__init__`` generation uses
    to assign attributes, depending on frozen/slotted status.

    Returns a tuple of (extra setup lines, plain setter, converting setter).
    """
    if not frozen:
        # Mutable classes assign straight to the attribute.
        return (), _assign, _assign_with_converter

    if slots:
        # Frozen + slotted: every assignment must bypass __setattr__.
        return (), _setattr, _setattr_with_converter

    # Frozen dict classes write into __dict__ directly -- unless the
    # attribute lives in an ancestor's slot (or has an on_setattr hook for
    # the converting variant), in which case we must bypass __setattr__.
    # Note _inst_dict will be used again below if cache_hash is True

    def fmt_setter(
        attr_name: str, value_var: str, has_on_setattr: bool
    ) -> str:
        if _is_slot_attr(attr_name, base_attr_map):
            return _setattr(attr_name, value_var, has_on_setattr)

        return f"_inst_dict['{attr_name}'] = {value_var}"

    def fmt_setter_with_converter(
        attr_name: str,
        value_var: str,
        has_on_setattr: bool,
        converter: Converter,
    ) -> str:
        if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
            return _setattr_with_converter(
                attr_name, value_var, has_on_setattr, converter
            )

        return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}"

    return (
        ("_inst_dict = self.__dict__",),
        fmt_setter,
        fmt_setter_with_converter,
    )
+
+
def _attrs_to_init_script(
    attrs: list[Attribute],
    is_frozen: bool,
    is_slotted: bool,
    call_pre_init: bool,
    pre_init_has_args: bool,
    call_post_init: bool,
    does_cache_hash: bool,
    base_attr_map: dict[str, type],
    is_exc: bool,
    needs_cached_setattr: bool,
    has_cls_on_setattr: bool,
    method_name: str,
) -> tuple[str, dict, dict]:
    """
    Return a script of an initializer for *attrs*, a dict of globals, and
    annotations for the initializer.

    The globals are required by the generated script.
    """
    lines = ["self.__attrs_pre_init__()"] if call_pre_init else []

    if needs_cached_setattr:
        lines.append(
            # Circumvent the __setattr__ descriptor to save one lookup per
            # assignment. Note _setattr will be used again below if
            # does_cache_hash is True.
            "_setattr = _cached_setattr_get(self)"
        )

    extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters(
        is_frozen, is_slotted, base_attr_map
    )
    lines.extend(extra_lines)

    args = []  # Parameters in the definition of __init__
    pre_init_args = []  # Parameters in the call to __attrs_pre_init__
    kw_only_args = []  # Used for both 'args' and 'pre_init_args' above
    attrs_to_validate = []

    # This is a dictionary of names to validator and converter callables.
    # Injecting this into __init__ globals lets us avoid lookups.
    names_for_globals = {}
    annotations = {"return": None}

    for a in attrs:
        if a.validator:
            attrs_to_validate.append(a)

        attr_name = a.name
        has_on_setattr = a.on_setattr is not None or (
            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
        )
        # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not
        # explicitly provided
        arg_name = a.alias

        has_factory = isinstance(a.default, Factory)
        maybe_self = "self" if has_factory and a.default.takes_self else ""

        # Normalize plain converter callables to Converter so the code
        # generation below only deals with one shape.
        if a.converter is not None and not isinstance(a.converter, Converter):
            converter = Converter(a.converter)
        else:
            converter = a.converter

        if a.init is False:
            # Field is not an __init__ parameter: initialize it from the
            # factory or the stored default.
            if has_factory:
                init_factory_name = _INIT_FACTORY_PAT % (a.name,)
                if converter is not None:
                    lines.append(
                        fmt_setter_with_converter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                            converter,
                        )
                    )
                    names_for_globals[converter._get_global_name(a.name)] = (
                        converter.converter
                    )
                else:
                    lines.append(
                        fmt_setter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                        )
                    )
                names_for_globals[init_factory_name] = a.default.factory
            elif converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    fmt_setter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                    )
                )
        elif a.default is not NOTHING and not has_factory:
            # Plain default: becomes the parameter default; looked up via
            # attr_dict to avoid repr round-tripping the value.
            arg = f"{arg_name}=attr_dict['{attr_name}'].default"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)
                pre_init_args.append(arg_name)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        elif has_factory:
            # Factory default: the NOTHING sentinel marks "argument not
            # passed", in which case the factory is invoked.
            arg = f"{arg_name}=NOTHING"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)
                pre_init_args.append(arg_name)
            lines.append(f"if {arg_name} is not NOTHING:")

            init_factory_name = _INIT_FACTORY_PAT % (a.name,)
            if converter is not None:
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                    )
                )
                names_for_globals[init_factory_name] = a.default.factory
        else:
            # Mandatory parameter without any default.
            if a.kw_only:
                kw_only_args.append(arg_name)
            else:
                args.append(arg_name)
                pre_init_args.append(arg_name)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        if a.init is True:
            if a.type is not None and converter is None:
                annotations[arg_name] = a.type
            elif converter is not None and converter._first_param_type:
                # Use the type from the converter if present.
                annotations[arg_name] = converter._first_param_type

    if attrs_to_validate:  # we can skip this if there are no validators.
        names_for_globals["_config"] = _config
        lines.append("if _config._run_validators is True:")
        for a in attrs_to_validate:
            val_name = "__attr_validator_" + a.name
            attr_name = "__attr_" + a.name
            lines.append(f"    {val_name}(self, {attr_name}, self.{a.name})")
            names_for_globals[val_name] = a.validator
            names_for_globals[attr_name] = a

    if call_post_init:
        lines.append("self.__attrs_post_init__()")

    # Because this is set only after __attrs_post_init__ is called, a crash
    # will result if post-init tries to access the hash code. This seemed
    # preferable to setting this beforehand, in which case alteration to field
    # values during post-init combined with post-init accessing the hash code
    # would result in silent bugs.
    if does_cache_hash:
        if is_frozen:
            if is_slotted:
                init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)"
            else:
                init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None"
        else:
            init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None"
        lines.append(init_hash_cache)

    # For exceptions we rely on BaseException.__init__ for proper
    # initialization.
    if is_exc:
        vals = ",".join(f"self.{a.name}" for a in attrs if a.init)

        lines.append(f"BaseException.__init__(self, {vals})")

    args = ", ".join(args)
    pre_init_args = ", ".join(pre_init_args)
    if kw_only_args:
        # leading comma & kw_only args
        args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}"
        pre_init_kw_only_args = ", ".join(
            [
                f"{kw_arg_name}={kw_arg_name}"
                # We need to remove the defaults from the kw_only_args.
                for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args)
            ]
        )
        pre_init_args += ", " if pre_init_args else ""
        pre_init_args += pre_init_kw_only_args

    if call_pre_init and pre_init_has_args:
        # If pre init method has arguments, pass the values given to __init__.
        lines[0] = f"self.__attrs_pre_init__({pre_init_args})"

    # Python <3.12 doesn't allow backslashes in f-strings.
    NL = "\n    "
    return (
        f"""def {method_name}(self, {args}):
    {NL.join(lines) if lines else "pass"}
""",
        names_for_globals,
        annotations,
    )
+
+
+def _default_init_alias_for(name: str) -> str:
+ """
+ The default __init__ parameter name for a field.
+
+ This performs private-name adjustment via leading-unscore stripping,
+ and is the default value of Attribute.alias if not provided.
+ """
+
+ return name.lstrip("_")
+
+
class Attribute:
    """
    *Read-only* representation of an attribute.

    .. warning::

       You should never instantiate this class yourself.

    The class has *all* arguments of `attr.ib` (except for ``factory`` which is
    only syntactic sugar for ``default=Factory(...)``) plus the following:

    - ``name`` (`str`): The name of the attribute.
    - ``alias`` (`str`): The __init__ parameter name of the attribute, after
      any explicit overrides and default private-attribute-name handling.
    - ``inherited`` (`bool`): Whether or not that attribute has been inherited
      from a base class.
    - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The
      callables that are used for comparing and ordering objects by this
      attribute, respectively. These are set by passing a callable to
      `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also
      :ref:`comparison customization <custom-comparison>`.

    Instances of this class are frequently used for introspection purposes
    like:

    - `fields` returns a tuple of them.
    - Validators get them passed as the first argument.
    - The :ref:`field transformer <transform-fields>` hook receives a list of
      them.
    - The ``alias`` property exposes the __init__ parameter name of the field,
      with any overrides and default private-attribute handling applied.


    .. versionadded:: 20.1.0 *inherited*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
        equality checks and hashing anymore.
    .. versionadded:: 21.1.0 *eq_key* and *order_key*
    .. versionadded:: 22.2.0 *alias*

    For the full version history of the fields, see `attr.ib`.
    """

    # These slots must NOT be reordered because we use them later for
    # instantiation.
    __slots__ = (  # noqa: RUF023
        "name",
        "default",
        "validator",
        "repr",
        "eq",
        "eq_key",
        "order",
        "order_key",
        "hash",
        "init",
        "metadata",
        "type",
        "converter",
        "kw_only",
        "inherited",
        "on_setattr",
        "alias",
    )

    def __init__(
        self,
        name,
        default,
        validator,
        repr,
        cmp,  # XXX: unused, remove along with other cmp code.
        hash,
        init,
        inherited,
        metadata=None,
        type=None,
        converter=None,
        kw_only=False,
        eq=None,
        eq_key=None,
        order=None,
        order_key=None,
        on_setattr=None,
        alias=None,
    ):
        """
        Initialize the (frozen) attribute description.

        All assignments must bypass our own ``__setattr__``, which raises
        unconditionally.
        """
        eq, eq_key, order, order_key = _determine_attrib_eq_order(
            cmp, eq_key or eq, order_key or order, True
        )

        # Cache this descriptor here to speed things up later.
        bound_setattr = _OBJ_SETATTR.__get__(self)

        # Despite the big red warning, people *do* instantiate `Attribute`
        # themselves.
        bound_setattr("name", name)
        bound_setattr("default", default)
        bound_setattr("validator", validator)
        bound_setattr("repr", repr)
        bound_setattr("eq", eq)
        bound_setattr("eq_key", eq_key)
        bound_setattr("order", order)
        bound_setattr("order_key", order_key)
        bound_setattr("hash", hash)
        bound_setattr("init", init)
        bound_setattr("converter", converter)
        bound_setattr(
            "metadata",
            (
                types.MappingProxyType(dict(metadata))  # Shallow copy
                if metadata
                else _EMPTY_METADATA_SINGLETON
            ),
        )
        bound_setattr("type", type)
        bound_setattr("kw_only", kw_only)
        bound_setattr("inherited", inherited)
        bound_setattr("on_setattr", on_setattr)
        bound_setattr("alias", alias)

    def __setattr__(self, name, value):
        # Attribute instances are immutable; all internal writes go through
        # the cached object.__setattr__ instead.
        raise FrozenInstanceError

    @classmethod
    def from_counting_attr(
        cls, name: str, ca: _CountingAttr, kw_only: bool, type=None
    ):
        """
        Build an `Attribute` from the intermediate `_CountingAttr` *ca*.
        """
        # The 'kw_only' argument is the class-level setting, and is used if the
        # attribute itself does not explicitly set 'kw_only'.
        # type holds the annotated value. deal with conflicts:
        if type is None:
            type = ca.type
        elif ca.type is not None:
            msg = f"Type annotation and type argument cannot both be present for '{name}'."
            raise ValueError(msg)
        return cls(
            name,
            ca._default,
            ca._validator,
            ca.repr,
            None,
            ca.hash,
            ca.init,
            False,
            ca.metadata,
            type,
            ca.converter,
            kw_only if ca.kw_only is None else ca.kw_only,
            ca.eq,
            ca.eq_key,
            ca.order,
            ca.order_key,
            ca.on_setattr,
            ca.alias,
        )

    # Don't use attrs.evolve since fields(Attribute) doesn't work
    def evolve(self, **changes):
        """
        Copy *self* and apply *changes*.

        This works similarly to `attrs.evolve` but that function does not work
        with :class:`attrs.Attribute`.

        It is mainly meant to be used for `transform-fields`.

        .. versionadded:: 20.3.0
        """
        new = copy.copy(self)

        new._setattrs(changes.items())

        return new

    # Don't use _add_pickle since fields(Attribute) doesn't work
    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # metadata is a MappingProxyType, which can't be pickled directly;
        # serialize it as a plain dict.
        return tuple(
            getattr(self, name) if name != "metadata" else dict(self.metadata)
            for name in self.__slots__
        )

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        self._setattrs(zip(self.__slots__, state))

    def _setattrs(self, name_values_pairs):
        """
        Write the given (name, value) pairs, bypassing our frozen
        ``__setattr__`` and re-wrapping *metadata* in a read-only proxy.
        """
        bound_setattr = _OBJ_SETATTR.__get__(self)
        for name, value in name_values_pairs:
            if name != "metadata":
                bound_setattr(name, value)
            else:
                bound_setattr(
                    name,
                    (
                        types.MappingProxyType(dict(value))
                        if value
                        else _EMPTY_METADATA_SINGLETON
                    ),
                )
+
+
# Bootstrap: describe Attribute's own fields with Attribute instances so the
# class itself can be given generated __repr__/__eq__/__hash__ below.
# "metadata" is excluded from hashing because its value is a mapping.
_a = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=(name != "metadata"),
        init=True,
        inherited=False,
        alias=_default_init_alias_for(name),
    )
    for name in Attribute.__slots__
]

# "inherited" is deliberately excluded from equality and hashing (see the
# 20.2.0 changelog note in the class docstring).
Attribute = _add_hash(
    _add_eq(
        _add_repr(Attribute, attrs=_a),
        attrs=[a for a in _a if a.name != "inherited"],
    ),
    attrs=[a for a in _a if a.hash and a.name != "inherited"],
)
+
+
class _CountingAttr:
    """
    Intermediate representation of attributes that uses a counter to preserve
    the order in which the attributes have been defined.

    *Internal* data structure of the attrs library.  Running into it is most
    likely the result of a bug like a forgotten `@attr.s` decorator.
    """

    __slots__ = (
        "_default",
        "_validator",
        "alias",
        "converter",
        "counter",
        "eq",
        "eq_key",
        "hash",
        "init",
        "kw_only",
        "metadata",
        "on_setattr",
        "order",
        "order_key",
        "repr",
        "type",
    )
    # Hand-written field descriptions so introspection works on
    # _CountingAttr itself; "metadata" is listed separately because it is
    # excluded from hashing (its value is a mapping).
    __attrs_attrs__ = (
        *tuple(
            Attribute(
                name=name,
                alias=_default_init_alias_for(name),
                default=NOTHING,
                validator=None,
                repr=True,
                cmp=None,
                hash=True,
                init=True,
                kw_only=False,
                eq=True,
                eq_key=None,
                order=False,
                order_key=None,
                inherited=False,
                on_setattr=None,
            )
            for name in (
                "counter",
                "_default",
                "repr",
                "eq",
                "order",
                "hash",
                "init",
                "on_setattr",
                "alias",
            )
        ),
        Attribute(
            name="metadata",
            alias="metadata",
            default=None,
            validator=None,
            repr=True,
            cmp=None,
            hash=False,
            init=True,
            kw_only=False,
            eq=True,
            eq_key=None,
            order=False,
            order_key=None,
            inherited=False,
            on_setattr=None,
        ),
    )
    # Class-level counter, incremented for every field definition so that
    # the definition order within a class body can be reconstructed.
    cls_counter = 0

    def __init__(
        self,
        default,
        validator,
        repr,
        cmp,
        hash,
        init,
        converter,
        metadata,
        type,
        kw_only,
        eq,
        eq_key,
        order,
        order_key,
        on_setattr,
        alias,
    ):
        """
        Record the field configuration and stamp it with the next counter
        value.
        """
        _CountingAttr.cls_counter += 1
        self.counter = _CountingAttr.cls_counter
        self._default = default
        self._validator = validator
        self.converter = converter
        self.repr = repr
        self.eq = eq
        self.eq_key = eq_key
        self.order = order
        self.order_key = order_key
        self.hash = hash
        self.init = init
        self.metadata = metadata
        self.type = type
        self.kw_only = kw_only
        self.on_setattr = on_setattr
        self.alias = alias

    def validator(self, meth):
        """
        Decorator that adds *meth* to the list of validators.

        Returns *meth* unchanged.

        .. versionadded:: 17.1.0
        """
        if self._validator is None:
            self._validator = meth
        else:
            # Chain with any previously registered validator.
            self._validator = and_(self._validator, meth)
        return meth

    def default(self, meth):
        """
        Decorator that allows to set the default for an attribute.

        Returns *meth* unchanged.

        Raises:
            DefaultAlreadySetError: If default has been set before.

        .. versionadded:: 17.1.0
        """
        if self._default is not NOTHING:
            raise DefaultAlreadySetError

        # takes_self=True so the decorated method receives the half-built
        # instance, like a regular method would.
        self._default = Factory(meth, takes_self=True)

        return meth
+
+
# Give _CountingAttr itself generated __repr__ and __eq__/__ne__ methods.
_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
class ClassProps:
    """
    The effective class-level settings that result from the arguments given
    to the `attr.s()` / `define()` decorators.

    Internally, *attrs* builds the final class from exactly this data
    structure.

    Warning:

        This feature is currently **experimental** and is not covered by our
        strict backwards-compatibility guarantees.


    Attributes:
        is_exception (bool):
            Whether the class is treated as an exception class.

        is_slotted (bool):
            Whether the class is `slotted <slotted classes>`.

        has_weakref_slot (bool):
            Whether the class has a slot for weak references.

        is_frozen (bool):
            Whether the class is frozen.

        kw_only (KeywordOnly):
            Whether / how the ``__init__`` method enforces keyword-only
            arguments.

        collected_fields_by_mro (bool):
            Whether fields were collected following the method resolution
            order -- that is, correctly, but unlike `dataclasses`.

        added_init (bool):
            Whether *attrs* generated the ``__init__`` method.

        added_repr (bool):
            Whether *attrs* generated the ``__repr__`` method.

        added_eq (bool):
            Whether *attrs* generated the equality methods.

        added_ordering (bool):
            Whether *attrs* generated the ordering methods.

        hashability (Hashability): How `hashable <hashing>` the class is.

        added_match_args (bool):
            Whether positional `match <match>` over the fields is supported.

        added_str (bool):
            Whether *attrs* generated the ``__str__`` method.

        added_pickling (bool):
            Whether *attrs* generated ``__getstate__`` and ``__setstate__``
            methods for `pickle`.

        on_setattr_hook (Callable[[Any, Attribute[Any], Any], Any] | None):
            The class's ``__setattr__`` hook.

        field_transformer (Callable[[Attribute[Any]], Attribute[Any]] | None):
            The class's `field transformers <transform-fields>`.

    .. versionadded:: 25.4.0
    """

    class Hashability(enum.Enum):
        """
        The hashability of a class.

        .. versionadded:: 25.4.0
        """

        HASHABLE = "hashable"
        """Write a ``__hash__``."""
        HASHABLE_CACHED = "hashable_cache"
        """Write a ``__hash__`` and cache the hash."""
        UNHASHABLE = "unhashable"
        """Set ``__hash__`` to ``None``."""
        LEAVE_ALONE = "leave_alone"
        """Don't touch ``__hash__``."""

    class KeywordOnly(enum.Enum):
        """
        How attributes should be treated regarding keyword-only parameters.

        .. versionadded:: 25.4.0
        """

        NO = "no"
        """Attributes are not keyword-only."""
        YES = "yes"
        """Attributes in current class without kw_only=False are keyword-only."""
        FORCE = "force"
        """All attributes are keyword-only."""

    __slots__ = ( # noqa: RUF023 -- order matters for __init__
        "is_exception",
        "is_slotted",
        "has_weakref_slot",
        "is_frozen",
        "kw_only",
        "collected_fields_by_mro",
        "added_init",
        "added_repr",
        "added_eq",
        "added_ordering",
        "hashability",
        "added_match_args",
        "added_str",
        "added_pickling",
        "on_setattr_hook",
        "field_transformer",
    )

    def __init__(
        self,
        is_exception,
        is_slotted,
        has_weakref_slot,
        is_frozen,
        kw_only,
        collected_fields_by_mro,
        added_init,
        added_repr,
        added_eq,
        added_ordering,
        hashability,
        added_match_args,
        added_str,
        added_pickling,
        on_setattr_hook,
        field_transformer,
    ):
        # __slots__ is deliberately ordered like the parameter list, so all
        # attributes can be assigned in a single pass.
        values = (
            is_exception,
            is_slotted,
            has_weakref_slot,
            is_frozen,
            kw_only,
            collected_fields_by_mro,
            added_init,
            added_repr,
            added_eq,
            added_ordering,
            hashability,
            added_match_args,
            added_str,
            added_pickling,
            on_setattr_hook,
            field_transformer,
        )
        for slot, value in zip(self.__slots__, values):
            setattr(self, slot, value)

    @property
    def is_hashable(self):
        # True for both the plain and the hash-caching hashable variants.
        return self.hashability in (
            ClassProps.Hashability.HASHABLE,
            ClassProps.Hashability.HASHABLE_CACHED,
        )
+
+
# Stand-in Attribute definitions for ClassProps' own slots, used below to
# bolt generated __eq__/__ne__ and __repr__ onto ClassProps itself without
# decorating it.
_cas = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
        alias=_default_init_alias_for(name),
    )
    for name in ClassProps.__slots__
]

ClassProps = _add_eq(_add_repr(ClassProps, attrs=_cas), attrs=_cas)
+
+
class Factory:
    """
    Wraps a factory callable for producing field defaults.

    When passed as the default value to `attrs.field`, calling the wrapped
    factory produces a fresh default value per instance.

    Args:
        factory (typing.Callable):
            A callable that takes either none or exactly one mandatory
            positional argument depending on *takes_self*.

        takes_self (bool):
            Pass the partially initialized instance that is being initialized
            as a positional argument.

    .. versionadded:: 17.1.0 *takes_self*
    """

    __slots__ = ("factory", "takes_self")

    def __init__(self, factory, takes_self=False):
        self.factory = factory
        self.takes_self = takes_self

    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # Slotted classes have no __dict__; hand pickle an explicit tuple.
        return (self.factory, self.takes_self)

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        # Restore the attributes in __slots__ order.
        for slot, value in zip(self.__slots__, state):
            setattr(self, slot, value)
+
+
# Stand-in Attribute definitions for Factory's own slots, used below to
# generate __repr__/__eq__/__hash__ for Factory itself.
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in Factory.__slots__
]

Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
class Converter:
    """
    Stores a converter callable.

    Allows for the wrapped converter to take additional arguments. The
    arguments are passed in the order they are documented.

    Args:
        converter (Callable): A callable that converts the passed value.

        takes_self (bool):
            Pass the partially initialized instance that is being initialized
            as a positional argument. (default: `False`)

        takes_field (bool):
            Pass the field definition (an :class:`Attribute`) into the
            converter as a positional argument. (default: `False`)

    .. versionadded:: 24.1.0
    """

    # "__call__" appears in __slots__ because it is assigned per instance in
    # __init__ below.
    # NOTE(review): "_global_name" is never assigned in this class -- it is
    # presumably set by the code-generation machinery elsewhere; confirm.
    __slots__ = (
        "__call__",
        "_first_param_type",
        "_global_name",
        "converter",
        "takes_field",
        "takes_self",
    )

    def __init__(self, converter, *, takes_self=False, takes_field=False):
        self.converter = converter
        self.takes_self = takes_self
        self.takes_field = takes_field

        ex = _AnnotationExtractor(converter)
        self._first_param_type = ex.get_first_param_type()

        # Normalize to a uniform (value, instance, field) calling convention
        # once, so no flag checks are needed at conversion time.
        if not (self.takes_self or self.takes_field):
            self.__call__ = lambda value, _, __: self.converter(value)
        elif self.takes_self and not self.takes_field:
            self.__call__ = lambda value, instance, __: self.converter(
                value, instance
            )
        elif not self.takes_self and self.takes_field:
            self.__call__ = lambda value, __, field: self.converter(
                value, field
            )
        else:
            self.__call__ = lambda value, instance, field: self.converter(
                value, instance, field
            )

        # Propagate the wrapped converter's return annotation to the wrapper.
        rt = ex.get_return_type()
        if rt is not None:
            self.__call__.__annotations__["return"] = rt

    @staticmethod
    def _get_global_name(attr_name: str) -> str:
        """
        Return the name that a converter for an attribute name *attr_name*
        would have.
        """
        return f"__attr_converter_{attr_name}"

    def _fmt_converter_call(self, attr_name: str, value_var: str) -> str:
        """
        Return a string that calls the converter for an attribute name
        *attr_name* and the value in variable named *value_var* according to
        `self.takes_self` and `self.takes_field`.
        """
        if not (self.takes_self or self.takes_field):
            return f"{self._get_global_name(attr_name)}({value_var})"

        if self.takes_self and self.takes_field:
            return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])"

        if self.takes_self:
            return f"{self._get_global_name(attr_name)}({value_var}, self)"

        return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])"

    def __getstate__(self):
        """
        Return a dict containing only converter and takes_self -- the rest gets
        computed when loading.
        """
        return {
            "converter": self.converter,
            "takes_self": self.takes_self,
            "takes_field": self.takes_field,
        }

    def __setstate__(self, state):
        """
        Load instance from state.
        """
        # Re-running __init__ rebuilds the cached __call__ and the extracted
        # annotation, which were deliberately left out of the pickled state.
        self.__init__(**state)
+
+
# Stand-in Attribute definitions for Converter's public state, used below to
# generate __repr__/__eq__/__hash__ for Converter itself. Only the public
# fields participate; the cached "__call__" and private slots are deliberately
# excluded.
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in ("converter", "takes_self", "takes_field")
]

Converter = _add_hash(
    _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f
)
+
+
def make_class(
    name, attrs, bases=(object,), class_body=None, **attributes_arguments
):
    r"""
    A quick way to create a new class called *name* with *attrs*.

    .. note::

        ``make_class()`` is a thin wrapper around `attr.s`, not `attrs.define`
        which means that it doesn't come with some of the improved defaults.

        For example, if you want the same ``on_setattr`` behavior as in
        `attrs.define`, you have to pass the hooks yourself: ``make_class(...,
        on_setattr=setters.pipe(setters.convert, setters.validate)``

    .. warning::

        It is *your* duty to ensure that the class name and the attribute names
        are valid identifiers. ``make_class()`` will *not* validate them for
        you.

    Args:
        name (str): The name for the new class.

        attrs (list | dict):
            A list of names or a dictionary of mappings of names to `attr.ib`\
            s / `attrs.field`\ s.

            The order is deduced from the order of the names or attributes
            inside *attrs*. Otherwise the order of the definition of the
            attributes is used.

        bases (tuple[type, ...]): Classes that the new class will subclass.

        class_body (dict):
            An optional dictionary of class attributes for the new class.

        attributes_arguments: Passed unmodified to `attr.s`.

    Returns:
        type: A new class with *attrs*.

    .. versionadded:: 17.1.0 *bases*
    .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
    .. versionchanged:: 23.2.0 *class_body*
    .. versionchanged:: 25.2.0 Class names can now be unicode.
    """
    # Class identifiers are converted into the normal form NFKC while parsing
    name = unicodedata.normalize("NFKC", name)

    if isinstance(attrs, dict):
        cls_dict = attrs
    elif isinstance(attrs, (list, tuple)):
        cls_dict = {a: attrib() for a in attrs}
    else:
        msg = "attrs argument must be a dict or a list."
        raise TypeError(msg)

    # These dunders are hooks, not attributes; pull them out of the attribute
    # dict so they end up on the real class body instead.
    pre_init = cls_dict.pop("__attrs_pre_init__", None)
    post_init = cls_dict.pop("__attrs_post_init__", None)
    user_init = cls_dict.pop("__init__", None)

    body = {}
    if class_body is not None:
        body.update(class_body)
    if pre_init is not None:
        body["__attrs_pre_init__"] = pre_init
    if post_init is not None:
        body["__attrs_post_init__"] = post_init
    if user_init is not None:
        body["__init__"] = user_init

    type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))

    # For pickling to work, the __module__ variable needs to be set to the
    # frame where the class is created. Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython).
    with contextlib.suppress(AttributeError, ValueError):
        type_.__module__ = sys._getframe(1).f_globals.get(
            "__name__", "__main__"
        )

    # We do it here for proper warnings with meaningful stacklevel.
    cmp = attributes_arguments.pop("cmp", None)
    (
        attributes_arguments["eq"],
        attributes_arguments["order"],
    ) = _determine_attrs_eq_order(
        cmp,
        attributes_arguments.get("eq"),
        attributes_arguments.get("order"),
        True,
    )

    cls = _attrs(these=cls_dict, **attributes_arguments)(type_)
    # Only add type annotations now or "_attrs()" will complain:
    cls.__annotations__ = {
        k: v.type for k, v in cls_dict.items() if v.type is not None
    }
    return cls
+
+
+# These are required by within this module so we define them here and merely
+# import into .validators / .converters.
+
+
@attrs(slots=True, unsafe_hash=True)
class _AndValidator:
    """
    Run a sequence of wrapped validators as if they were a single one.
    """

    _validators = attrib()

    def __call__(self, inst, attr, value):
        # Every wrapped validator must pass; any of them may raise.
        for validate in self._validators:
            validate(inst, attr, value)
+
+
def and_(*validators):
    """
    A validator that composes multiple validators into one.

    When called on a value, it runs all wrapped validators.

    Args:
        validators (~collections.abc.Iterable[typing.Callable]):
            Arbitrary number of validators.

    .. versionadded:: 17.1.0
    """
    flattened = []
    for v in validators:
        if isinstance(v, _AndValidator):
            # Merge nested compositions instead of wrapping them again.
            flattened.extend(v._validators)
        else:
            flattened.append(v)

    return _AndValidator(tuple(flattened))
+
+
def pipe(*converters):
    """
    A converter that composes multiple converters into one.

    When called on a value, it runs all wrapped converters, returning the
    *last* value.

    Type annotations will be inferred from the wrapped converters', if they
    have any.

    Args:
        converters (~collections.abc.Iterable[typing.Callable]):
            Arbitrary number of converters.

    .. versionadded:: 20.1.0
    """

    # If any step is a Converter instance, the whole pipeline must accept
    # (value, instance, field) and is returned wrapped in a Converter again.
    return_instance = any(isinstance(c, Converter) for c in converters)

    if return_instance:

        def pipe_converter(val, inst, field):
            for c in converters:
                val = (
                    c(val, inst, field) if isinstance(c, Converter) else c(val)
                )

            return val

    else:

        def pipe_converter(val):
            for c in converters:
                val = c(val)

            return val

    if not converters:
        # If the converter list is empty, pipe_converter is the identity.
        A = TypeVar("A")
        pipe_converter.__annotations__.update({"val": A, "return": A})
    else:
        # Get parameter type from first converter.
        t = _AnnotationExtractor(converters[0]).get_first_param_type()
        if t:
            pipe_converter.__annotations__["val"] = t

        last = converters[-1]
        if not PY_3_11_PLUS and isinstance(last, Converter):
            # NOTE(review): on Python < 3.11 the return annotation is read
            # from the Converter's __call__ itself -- presumably working
            # around older inspect behavior; confirm against upstream.
            last = last.__call__

        # Get return type from last converter.
        rt = _AnnotationExtractor(last).get_return_type()
        if rt:
            pipe_converter.__annotations__["return"] = rt

    if return_instance:
        return Converter(pipe_converter, takes_self=True, takes_field=True)
    return pipe_converter
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_next_gen.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_next_gen.py"
new file mode 100644
index 0000000..4ccd0da
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_next_gen.py"
@@ -0,0 +1,674 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are keyword-only APIs that call `attr.s` and `attr.ib` with different
+default values.
+"""
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+ _DEFAULT_ON_SETATTR,
+ NOTHING,
+ _frozen_setattrs,
+ attrib,
+ attrs,
+)
+from .exceptions import NotAnAttrsClassError, UnannotatedAttributeError
+
+
def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    unsafe_hash=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
    force_kw_only=False,
):
    r"""
    A class decorator that adds :term:`dunder methods` according to
    :term:`fields <field>` specified using :doc:`type annotations <types>`,
    `field()` calls, or the *these* argument.

    It is a thin wrapper around the classic `attr.s` with modern defaults:
    *slots=True*, *auto_exc=True*, *auto_detect=True*, *order=False*,
    *force_kw_only=False*, automatic guessing of *auto_attribs*, and --
    unless the class is frozen -- converters and validators also run on
    ``setattr``.

    Since *attrs* patches or replaces an existing class,
    `object.__init_subclass__` runs too early; define an
    ``__attrs_init_subclass__`` classmethod instead (see :ref:`init-subclass`).

    Args:
        slots (bool):
            Create a :term:`slotted class <slotted classes>` -- more
            memory-efficient, but read the glossary entry for the gotchas.

        auto_detect (bool):
            Assume *init*, *repr*, *eq*, and *hash* are True **unless** the
            corresponding method is implemented directly on the class (not
            inherited). Explicit True/False arguments always win.

        auto_exc (bool):
            If the class subclasses `BaseException`, behave like a
            well-behaved exception: ignore *eq*, *order*, *hash*, and *str*;
            compare and hash by id; and expose the ``__init__`` values as the
            ``args`` tuple. Existing ``__hash__`` / equality methods are not
            removed.

        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
            Hook run on every attribute assignment; it receives the instance,
            the attribute, and the new value, and its return value is stored.
            A list is wrapped with `attrs.setters.pipe`. If left None,
            converters and validators run on every assignment.

        init (bool):
            Create an ``__init__`` method (leading underscores are stripped
            from argument names unless an alias is set). See `init` for
            customization hooks.

        repr (bool):
            Create a ``__repr__`` method.

        str (bool):
            Make ``__str__`` identical to ``__repr__`` -- mostly useful for
            `Exception`\ s.

        eq (bool | None):
            If True or None (default), add ``__eq__`` and ``__ne__`` methods.
            See `comparison` for customization.

        order (bool | None):
            If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
            that compare instances like tuples of their *attrs* attributes,
            but only between *identical* types. If None, mirror *eq*.

        unsafe_hash (bool | None):
            If None (default), derive ``__hash__`` from *eq* and *frozen*:
            generated when both are True; set to None (unhashable) when only
            *eq* is True; left untouched when *eq* is False. True/False force
            the choice -- use carefully. See `hashing`.

        hash (bool | None):
            Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.

        cache_hash (bool):
            Compute the hash once and store it on the object. Requires hashing
            to be enabled; hash-relevant fields must never change afterwards.

        frozen (bool):
            Make instances immutable after initialization; assignment raises
            `attrs.exceptions.FrozenInstanceError`. Implemented via a custom
            ``__setattr__``, inherited by subclasses, and slightly slows down
            ``__init__``; circumvent in ``__attrs_post_init__`` with
            ``object.__setattr__(self, "attribute_name", value)``.

        kw_only (bool):
            Make this class's attributes keyword-only in the generated
            ``__init__`` (ignored if *init* is False). Attributes with an
            explicit ``kw_only=False`` and base-class attributes are not
            affected -- see *force_kw_only*.

        weakref_slot (bool):
            Make instances weak-referenceable; only effective with *slots*.

        field_transformer (~typing.Callable | None):
            Called with the original class and all fields right before the
            class is finalized. See `transform-fields`.

        match_args (bool):
            If True (default), set ``__match_args__`` for :pep:`634`
            structural pattern matching on Python 3.10+.

        force_kw_only (bool):
            Back-compat flag restoring pre-25.4.0 behavior: with
            ``kw_only=True``, *all* attributes become keyword-only, including
            base-class ones and those with attribute-level ``kw_only=False``.

        getstate_setstate (bool | None):
            Generate ``__getstate__`` / ``__setstate__`` (needed to pickle
            slotted classes). None means True for slotted classes and False
            for dict classes; with *auto_detect*, a directly defined method
            disables generation.

        auto_attribs (bool | None):
            If True, collect annotated attributes like `dataclasses`
            (`typing.ClassVar` annotations are ignored; an unannotated
            `field()` raises
            `attrs.exceptions.UnannotatedAttributeError`). If False, collect
            only explicit `attrs.field` assignments. If None, guess: True
            when any attribute is annotated and no unannotated field is
            found, else False.

        these (dict[str, object]):
            A mapping of attribute names to `field()` results to use instead
            of inspecting the class body; nothing is removed from the class.
            The order is the order inside *these*.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    .. versionchanged:: 24.1.0
       Equality uses one big ``and`` condition instead of tuple comparison --
       faster and more correct for uncomparable values like `math.nan`.
    .. versionadded:: 24.1.0
       An inherited ``__attrs_init_subclass__`` classmethod is executed after
       the class is created.
    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
    .. versionadded:: 24.3.0
       Unless already present, a ``__replace__`` method is automatically
       created for `copy.replace` (Python 3.13+ only).
    .. versionchanged:: 25.4.0
       *kw_only* now only applies to attributes defined in the current class,
       and respects attribute-level ``kw_only=False`` settings.
    .. versionadded:: 25.4.0
       Added *force_kw_only* to go back to the previous *kw_only* behavior.
    """

    def apply(cls, auto_attribs):
        # Forward everything to the classic attr.s() API; collecting fields
        # by MRO is always the correct behavior here.
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            unsafe_hash=unsafe_hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
            match_args=match_args,
            force_kw_only=force_kw_only,
        )

    def wrap(cls):
        # Runs at class-creation time so frozen-ness can be inherited from
        # the base classes that exist by then.
        nonlocal frozen, on_setattr

        user_supplied_hook = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes convert & validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = _DEFAULT_ON_SETATTR

        # Subclassing a frozen class inherits the immutability, which rules
        # out any user-supplied setattr hook.
        for base in cls.__bases__:
            if base.__setattr__ is _frozen_setattrs:
                if user_supplied_hook:
                    msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
                    raise ValueError(msg)

                on_setattr = setters.NO_OP
                break

        if auto_attribs is not None:
            return apply(cls, auto_attribs)

        # Guess auto_attribs: try annotation-based collection first, fall
        # back to classic field collection.
        try:
            return apply(cls, True)
        except UnannotatedAttributeError:
            return apply(cls, False)

    # Used bare as `@define` (class passed directly) or called as `@define()`
    # (maybe_cls is None and the decorator is returned).
    return wrap if maybe_cls is None else wrap(maybe_cls)
+
+
# `mutable` is simply an alias for `define`; `frozen` is `define`
# pre-configured with frozen=True and no on_setattr hook.
mutable = define
frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+ *,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=None,
+ eq=None,
+ order=None,
+ on_setattr=None,
+ alias=None,
+):
+ """
+ Create a new :term:`field` / :term:`attribute` on a class.
+
+ .. warning::
+
+ Does **nothing** unless the class is also decorated with
+ `attrs.define` (or similar)!
+
+ Args:
+ default:
+ A value that is used if an *attrs*-generated ``__init__`` is used
+ and no value is passed while instantiating or the attribute is
+ excluded using ``init=False``.
+
+ If the value is an instance of `attrs.Factory`, its callable will
+ be used to construct a new value (useful for mutable data types
+ like lists or dicts).
+
+ If a default is not set (or set manually to `attrs.NOTHING`), a
+ value *must* be supplied when instantiating; otherwise a
+ `TypeError` will be raised.
+
+ .. seealso:: `defaults`
+
+ factory (~typing.Callable):
+ Syntactic sugar for ``default=attr.Factory(factory)``.
+
+ validator (~typing.Callable | list[~typing.Callable]):
+ Callable that is called by *attrs*-generated ``__init__`` methods
+ after the instance has been initialized. They receive the
+ initialized instance, the :func:`~attrs.Attribute`, and the passed
+ value.
+
+ The return value is *not* inspected so the validator has to throw
+ an exception itself.
+
+ If a `list` is passed, its items are treated as validators and must
+ all pass.
+
+ Validators can be globally disabled and re-enabled using
+ `attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
+
+ The validator can also be set using decorator notation as shown
+ below.
+
+ .. seealso:: :ref:`validators`
+
+ repr (bool | ~typing.Callable):
+ Include this attribute in the generated ``__repr__`` method. If
+ True, include the attribute; if False, omit it. By default, the
+ built-in ``repr()`` function is used. To override how the attribute
+ value is formatted, pass a ``callable`` that takes a single value
+ and returns a string. Note that the resulting string is used as-is,
+ which means it will be used directly *instead* of calling
+ ``repr()`` (the default).
+
+ eq (bool | ~typing.Callable):
+ If True (default), include this attribute in the generated
+ ``__eq__`` and ``__ne__`` methods that check two instances for
+ equality. To override how the attribute value is compared, pass a
+ callable that takes a single value and returns the value to be
+ compared.
+
+ .. seealso:: `comparison`
+
+ order (bool | ~typing.Callable):
+ If True (default), include this attributes in the generated
+ ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
+ override how the attribute value is ordered, pass a callable that
+ takes a single value and returns the value to be ordered.
+
+ .. seealso:: `comparison`
+
+ hash (bool | None):
+ Include this attribute in the generated ``__hash__`` method. If
+ None (default), mirror *eq*'s value. This is the correct behavior
+ according the Python spec. Setting this value to anything else
+ than None is *discouraged*.
+
+ .. seealso:: `hashing`
+
+ init (bool):
+ Include this attribute in the generated ``__init__`` method.
+
+ It is possible to set this to False and set a default value. In
+ that case this attributed is unconditionally initialized with the
+ specified default value or factory.
+
+ .. seealso:: `init`
+
+ converter (typing.Callable | Converter):
+ A callable that is called by *attrs*-generated ``__init__`` methods
+ to convert attribute's value to the desired format.
+
+ If a vanilla callable is passed, it is given the passed-in value as
+ the only positional argument. It is possible to receive additional
+ arguments by wrapping the callable in a `Converter`.
+
+ Either way, the returned value will be used as the new value of the
+ attribute. The value is converted before being passed to the
+ validator, if any.
+
+ .. seealso:: :ref:`converters`
+
+ metadata (dict | None):
+ An arbitrary mapping, to be used by third-party code.
+
+ .. seealso:: `extending-metadata`.
+
+ type (type):
+ The type of the attribute. Nowadays, the preferred method to
+ specify the type is using a variable annotation (see :pep:`526`).
+ This argument is provided for backwards-compatibility and for usage
+ with `make_class`. Regardless of the approach used, the type will
+ be stored on ``Attribute.type``.
+
+ Please note that *attrs* doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for `static type
+ checking <types>`.
+
+ kw_only (bool | None):
+ Make this attribute keyword-only in the generated ``__init__`` (if
+ *init* is False, this parameter is ignored). If None (default),
+ mirror the setting from `attrs.define`.
+
+ on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+ Allows to overwrite the *on_setattr* setting from `attr.s`. If left
+ None, the *on_setattr* value from `attr.s` is used. Set to
+ `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `define()`.
+
+ alias (str | None):
+ Override this attribute's parameter name in the generated
+ ``__init__`` method. If left None, default to ``name`` stripped
+ of leading underscores. See `private-attributes`.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionadded:: 22.2.0 *alias*
+ .. versionadded:: 23.1.0
+ The *type* parameter has been re-added; mostly for `attrs.make_class`.
+ Please note that type checkers ignore this metadata.
+ .. versionchanged:: 25.4.0
+ *kw_only* can now be None, and its default is also changed from False to
+ None.
+
+ .. seealso::
+
+ `attr.ib`
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ type=type,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ alias=alias,
+ )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+ """
+ Same as `attr.asdict`, except that collections types are always retained
+ and dict is always used as *dict_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _asdict(
+ inst=inst,
+ recurse=recurse,
+ filter=filter,
+ value_serializer=value_serializer,
+ retain_collection_types=True,
+ )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+ """
+ Same as `attr.astuple`, except that collections types are always retained
+ and `tuple` is always used as the *tuple_factory*.
+
+ .. versionadded:: 21.3.0
+ """
+ return _astuple(
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+ )
+
+
+def inspect(cls):
+ """
+ Inspect the class and return its effective build parameters.
+
+ Warning:
+ This feature is currently **experimental** and is not covered by our
+ strict backwards-compatibility guarantees.
+
+ Args:
+ cls: The *attrs*-decorated class to inspect.
+
+ Returns:
+ The effective build parameters of the class.
+
+ Raises:
+ NotAnAttrsClassError: If the class is not an *attrs*-decorated class.
+
+ .. versionadded:: 25.4.0
+ """
+ try:
+ return cls.__dict__["__attrs_props__"]
+ except KeyError:
+ msg = f"{cls!r} is not an attrs-decorated class."
+ raise NotAnAttrsClassError(msg) from None
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_typing_compat.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_typing_compat.pyi"
new file mode 100644
index 0000000..ca7b71e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_typing_compat.pyi"
@@ -0,0 +1,15 @@
+from typing import Any, ClassVar, Protocol
+
+# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
+MYPY = False
+
+if MYPY:
+ # A protocol to be able to statically accept an attrs class.
+ class AttrsInstance_(Protocol):
+ __attrs_attrs__: ClassVar[Any]
+
+else:
+ # For type checkers without plug-in support use an empty protocol that
+ # will (hopefully) be combined into a union.
+ class AttrsInstance_(Protocol):
+ pass
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_version_info.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_version_info.py"
new file mode 100644
index 0000000..27f1888
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_version_info.py"
@@ -0,0 +1,89 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo:
+ """
+ A version object that can be compared to tuple of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+ Parse *s* and return a _VersionInfo.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them
+
+ def __hash__(self):
+ return hash((self.year, self.minor, self.micro, self.releaselevel))
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_version_info.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_version_info.pyi"
new file mode 100644
index 0000000..45ced08
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/_version_info.pyi"
@@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/converters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/converters.py"
new file mode 100644
index 0000000..0a79dee
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/converters.py"
@@ -0,0 +1,162 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Converter, Factory, pipe
+
+
+__all__ = [
+ "default_if_none",
+ "optional",
+ "pipe",
+ "to_bool",
+]
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to `None`.
+
+ Type annotations will be inferred from the wrapped converter's, if it has
+ any.
+
+ Args:
+ converter (typing.Callable):
+ the converter that is used for non-`None` values.
+
+ .. versionadded:: 17.1.0
+ """
+
+ if isinstance(converter, Converter):
+
+ def optional_converter(val, inst, field):
+ if val is None:
+ return None
+ return converter(val, inst, field)
+
+ else:
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ xtr = _AnnotationExtractor(converter)
+
+ t = xtr.get_first_param_type()
+ if t:
+ optional_converter.__annotations__["val"] = typing.Optional[t]
+
+ rt = xtr.get_return_type()
+ if rt:
+ optional_converter.__annotations__["return"] = typing.Optional[rt]
+
+ if isinstance(converter, Converter):
+ return Converter(optional_converter, takes_self=True, takes_field=True)
+
+ return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+ A converter that allows to replace `None` values by *default* or the result
+ of *factory*.
+
+ Args:
+ default:
+ Value to be used if `None` is passed. Passing an instance of
+ `attrs.Factory` is supported, however the ``takes_self`` option is
+ *not*.
+
+ factory (typing.Callable):
+ A callable that takes no parameters whose result is used if `None`
+ is passed.
+
+ Raises:
+ TypeError: If **neither** *default* or *factory* is passed.
+
+ TypeError: If **both** *default* and *factory* are passed.
+
+ ValueError:
+ If an instance of `attrs.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ msg = "Must pass either `default` or `factory`."
+ raise TypeError(msg)
+
+ if default is not NOTHING and factory is not None:
+ msg = "Must pass either `default` or `factory` but not both."
+ raise TypeError(msg)
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ msg = "`takes_self` is not supported by default_if_none."
+ raise ValueError(msg)
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
+
+
+def to_bool(val):
+ """
+ Convert "boolean" strings (for example, from environment variables) to real
+ booleans.
+
+ Values mapping to `True`:
+
+ - ``True``
+ - ``"true"`` / ``"t"``
+ - ``"yes"`` / ``"y"``
+ - ``"on"``
+ - ``"1"``
+ - ``1``
+
+ Values mapping to `False`:
+
+ - ``False``
+ - ``"false"`` / ``"f"``
+ - ``"no"`` / ``"n"``
+ - ``"off"``
+ - ``"0"``
+ - ``0``
+
+ Raises:
+ ValueError: For any other value.
+
+ .. versionadded:: 21.3.0
+ """
+ if isinstance(val, str):
+ val = val.lower()
+
+ if val in (True, "true", "t", "yes", "y", "on", "1", 1):
+ return True
+ if val in (False, "false", "f", "no", "n", "off", "0", 0):
+ return False
+
+ msg = f"Cannot convert value to bool: {val!r}"
+ raise ValueError(msg)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/converters.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/converters.pyi"
new file mode 100644
index 0000000..12bd0c4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/converters.pyi"
@@ -0,0 +1,19 @@
+from typing import Callable, Any, overload
+
+from attrs import _ConverterType, _CallableConverterType
+
+@overload
+def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ...
+@overload
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+@overload
+def optional(converter: _CallableConverterType) -> _CallableConverterType: ...
+@overload
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: Any) -> _CallableConverterType: ...
+@overload
+def default_if_none(
+ *, factory: Callable[[], Any]
+) -> _CallableConverterType: ...
+def to_bool(val: str | int | bool) -> bool: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/exceptions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/exceptions.py"
new file mode 100644
index 0000000..3b7abb8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/exceptions.py"
@@ -0,0 +1,95 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+
+class FrozenError(AttributeError):
+ """
+ A frozen/immutable instance or attribute have been attempted to be
+ modified.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+ args: ClassVar[tuple[str]] = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+ """
+ A frozen instance has been attempted to be modified.
+
+ .. versionadded:: 16.1.0
+ """
+
+
+class FrozenAttributeError(FrozenError):
+ """
+ A frozen attribute has been attempted to be modified.
+
+ .. versionadded:: 20.1.0
+ """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An *attrs* function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-*attrs* class has been passed into an *attrs* function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+ A default has been set when defining the field and is attempted to be reset
+ using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has a field without a type annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ It was attempted to use an *attrs* feature that requires a newer Python
+ version.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+ A field requiring a callable has been set with a value that is not
+ callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/exceptions.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/exceptions.pyi"
new file mode 100644
index 0000000..f268011
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/exceptions.pyi"
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/filters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/filters.py"
new file mode 100644
index 0000000..689b170
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/filters.py"
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
+"""
+
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isinstance(cls, type)),
+ frozenset(cls for cls in what if isinstance(cls, str)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Create a filter that only allows *what*.
+
+ Args:
+ what (list[type, str, attrs.Attribute]):
+ What to include. Can be a type, a name, or an attribute.
+
+ Returns:
+ Callable:
+ A callable that can be passed to `attrs.asdict`'s and
+ `attrs.astuple`'s *filter* argument.
+
+ .. versionchanged:: 23.1.0 Accept strings with field names.
+ """
+ cls, names, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return (
+ value.__class__ in cls
+ or attribute.name in names
+ or attribute in attrs
+ )
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Create a filter that does **not** allow *what*.
+
+ Args:
+ what (list[type, str, attrs.Attribute]):
+ What to exclude. Can be a type, a name, or an attribute.
+
+ Returns:
+ Callable:
+ A callable that can be passed to `attrs.asdict`'s and
+ `attrs.astuple`'s *filter* argument.
+
+ .. versionchanged:: 23.3.0 Accept field name string as input argument
+ """
+ cls, names, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return not (
+ value.__class__ in cls
+ or attribute.name in names
+ or attribute in attrs
+ )
+
+ return exclude_
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/filters.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/filters.pyi"
new file mode 100644
index 0000000..974abdc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/filters.pyi"
@@ -0,0 +1,6 @@
+from typing import Any
+
+from . import Attribute, _FilterType
+
+def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
+def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/py.typed"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/py.typed"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/setters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/setters.py"
new file mode 100644
index 0000000..78b0839
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/setters.py"
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+ Prevent an attribute to be modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ # This can be removed once we drop 3.8 and use attrs.Converter instead.
+ from ._make import Converter
+
+ if not isinstance(c, Converter):
+ return c(new_value)
+
+ return c(new_value, instance, attrib)
+
+ return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# Sphinx's autodata stopped working, so the docstring is inlined in the API
+# docs.
+NO_OP = object()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/setters.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/setters.pyi"
new file mode 100644
index 0000000..73abf36
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/setters.pyi"
@@ -0,0 +1,20 @@
+from typing import Any, NewType, NoReturn, TypeVar
+
+from . import Attribute
+from attrs import _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/validators.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/validators.py"
new file mode 100644
index 0000000..837e003
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/validators.py"
@@ -0,0 +1,748 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+import operator
+import re
+
+from contextlib import contextmanager
+from re import Pattern
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .converters import default_if_none
+from .exceptions import NotCallableError
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "disabled",
+ "ge",
+ "get_disabled",
+ "gt",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "le",
+ "lt",
+ "matches_re",
+ "max_len",
+ "min_len",
+ "not_",
+ "optional",
+ "or_",
+ "set_disabled",
+]
+
+
+def set_disabled(disabled):
+ """
+ Globally disable or enable running validators.
+
+ By default, they are run.
+
+ Args:
+ disabled (bool): If `True`, disable running all validators.
+
+ .. warning::
+
+ This function is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(not disabled)
+
+
+def get_disabled():
+ """
+ Return a bool indicating whether validators are currently disabled or not.
+
+ Returns:
+ bool:`True` if validators are currently disabled.
+
+ .. versionadded:: 21.3.0
+ """
+ return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+ """
+ Context manager that disables running validators within its context.
+
+ .. warning::
+
+ This context manager is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(False)
+ try:
+ yield
+ finally:
+ set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InstanceOfValidator:
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
+ raise TypeError(
+ msg,
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return f"<instance_of validator for type {self.type!r}>"
+
+
+def instance_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called with a
+ wrong type for this particular attribute (checks are performed using
+ `isinstance` therefore it's also valid to pass a tuple of types).
+
+ Args:
+ type (type | tuple[type]): The type to check for.
+
+ Raises:
+ TypeError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the expected type, and the value it got.
+ """
+ return _InstanceOfValidator(type)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+ pattern = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
+ raise ValueError(
+ msg,
+ attr,
+ self.pattern,
+ value,
+ )
+
+ def __repr__(self):
+ return f"<matches_re validator for pattern {self.pattern!r}>"
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called with a
+ string that doesn't match *regex*.
+
+ Args:
+ regex (str, re.Pattern):
+ A regex string or precompiled pattern to match against
+
+ flags (int):
+ Flags that will be passed to the underlying re function (default 0)
+
+ func (typing.Callable):
+ Which underlying `re` function to call. Valid options are
+ `re.fullmatch`, `re.search`, and `re.match`; the default `None`
+ means `re.fullmatch`. For performance reasons, the pattern is
+ always precompiled using `re.compile`.
+
+ .. versionadded:: 19.2.0
+ .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+ """
+ valid_funcs = (re.fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ msg = "'func' must be one of {}.".format(
+ ", ".join(
+ sorted((e and e.__name__) or "None" for e in set(valid_funcs))
+ )
+ )
+ raise ValueError(msg)
+
+ if isinstance(regex, Pattern):
+ if flags:
+ msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
+ raise TypeError(msg)
+ pattern = regex
+ else:
+ pattern = re.compile(regex, flags)
+
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ else:
+ match_func = pattern.fullmatch
+
+ return _MatchesReValidator(pattern, match_func)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _OptionalValidator:
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return f"<optional validator for {self.validator!r} or None>"
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to `None` in addition to satisfying the requirements of
+ the sub-validator.
+
+ Args:
+ validator
+ (typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
+ A validator (or validators) that is used for non-`None` values.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
+ """
+ if isinstance(validator, (list, tuple)):
+ return _OptionalValidator(_AndValidator(validator))
+
+ return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InValidator:
+ options = attrib()
+ _original_options = attrib(hash=False)
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
+ raise ValueError(
+ msg,
+ attr,
+ self._original_options,
+ value,
+ )
+
+ def __repr__(self):
+ return f"<in_ validator with options {self._original_options!r}>"
+
+
+def in_(options):
+ """
+ A validator that raises a `ValueError` if the initializer is called with a
+ value that does not belong in the *options* provided.
+
+ The check is performed using ``value in options``, so *options* has to
+ support that operation.
+
+ To keep the validator hashable, dicts, lists, and sets are transparently
+ transformed into a `tuple`.
+
+ Args:
+ options: Allowed options.
+
+ Raises:
+ ValueError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the expected options, and the value it got.
+
+ .. versionadded:: 17.1.0
+ .. versionchanged:: 22.1.0
+ The ValueError was incomplete until now and only contained the human
+ readable error message. Now it contains all the information that has
+ been promised since 17.1.0.
+ .. versionchanged:: 24.1.0
+ *options* that are a list, dict, or a set are now transformed into a
+ tuple to keep the validator hashable.
+ """
+ repr_options = options
+ if isinstance(options, (list, dict, set)):
+ options = tuple(options)
+
+ return _InValidator(options, repr_options)
+
+
+@attrs(repr=False, slots=False, unsafe_hash=True)
+class _IsCallableValidator:
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+ A validator that raises a `attrs.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute that is
+ not callable.
+
+ .. versionadded:: 19.1.0
+
+ Raises:
+ attrs.exceptions.NotCallableError:
+ With a human readable error message containing the attribute
+ (`attrs.Attribute`) name, and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepIterable:
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else f" {self.iterable_validator!r}"
+ )
+ return (
+ f"<deep_iterable validator for{iterable_identifier}"
+ f" iterables of {self.member_validator!r}>"
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ Args:
+ member_validator: Validator(s) to apply to iterable members.
+
+ iterable_validator:
+ Validator(s) to apply to iterable itself (optional).
+
+ Raises
+ TypeError: if any sub-validators fail
+
+ .. versionadded:: 19.1.0
+
+ .. versionchanged:: 25.4.0
+ *member_validator* and *iterable_validator* can now be a list or tuple
+ of validators.
+ """
+ if isinstance(member_validator, (list, tuple)):
+ member_validator = and_(*member_validator)
+ if isinstance(iterable_validator, (list, tuple)):
+ iterable_validator = and_(*iterable_validator)
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepMapping:
+ key_validator = attrib(validator=optional(is_callable()))
+ value_validator = attrib(validator=optional(is_callable()))
+ mapping_validator = attrib(validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ if self.key_validator is not None:
+ self.key_validator(inst, attr, key)
+ if self.value_validator is not None:
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"
+
+
+def deep_mapping(
+ key_validator=None, value_validator=None, mapping_validator=None
+):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ All validators are optional, but at least one of *key_validator* or
+ *value_validator* must be provided.
+
+ Args:
+ key_validator: Validator(s) to apply to dictionary keys.
+
+ value_validator: Validator(s) to apply to dictionary values.
+
+ mapping_validator:
+ Validator(s) to apply to top-level mapping attribute.
+
+ .. versionadded:: 19.1.0
+
+ .. versionchanged:: 25.4.0
+ *key_validator* and *value_validator* are now optional, but at least one
+ of them must be provided.
+
+ .. versionchanged:: 25.4.0
+ *key_validator*, *value_validator*, and *mapping_validator* can now be a
+ list or tuple of validators.
+
+ Raises:
+ TypeError: If any sub-validator fails on validation.
+
+ ValueError:
+ If neither *key_validator* nor *value_validator* is provided on
+ instantiation.
+ """
+ if key_validator is None and value_validator is None:
+ msg = (
+ "At least one of key_validator or value_validator must be provided"
+ )
+ raise ValueError(msg)
+
+ if isinstance(key_validator, (list, tuple)):
+ key_validator = and_(*key_validator)
+ if isinstance(value_validator, (list, tuple)):
+ value_validator = and_(*value_validator)
+ if isinstance(mapping_validator, (list, tuple)):
+ mapping_validator = and_(*mapping_validator)
+
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+ bound = attrib()
+ compare_op = attrib()
+ compare_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.compare_func(value, self.bound):
+ msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f"<Validator for x {self.compare_op} {self.bound}>"
+
+
+def lt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number larger or equal to *val*.
+
+ The validator uses `operator.lt` to compare the values.
+
+ Args:
+ val: Exclusive upper bound for values.
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number greater than *val*.
+
+ The validator uses `operator.le` to compare the values.
+
+ Args:
+ val: Inclusive upper bound for values.
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number smaller than *val*.
+
+ The validator uses `operator.ge` to compare the values.
+
+ Args:
+ val: Inclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number smaller or equal to *val*.
+
+ The validator uses `operator.gt` to compare the values.
+
+ Args:
+ val: Exclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+ max_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) > self.max_length:
+ msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f"<max_len validator for {self.max_length}>"
+
+
+def max_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is longer than *length*.
+
+ Args:
+ length (int): Maximum length of the string or iterable
+
+ .. versionadded:: 21.3.0
+ """
+ return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+ min_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) < self.min_length:
+ msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f"<min_len validator for {self.min_length}>"
+
+
+def min_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is shorter than *length*.
+
+ Args:
+ length (int): Minimum length of the string or iterable
+
+ .. versionadded:: 22.1.0
+ """
+ return _MinLengthValidator(length)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _SubclassOfValidator:
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not issubclass(value, self.type):
+ msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
+ raise TypeError(
+ msg,
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return f"<subclass_of validator for type {self.type!r}>"
+
+
+def _subclass_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called with a
+ wrong type for this particular attribute (checks are performed using
+ `issubclass` therefore it's also valid to pass a tuple of types).
+
+ Args:
+ type (type | tuple[type, ...]): The type(s) to check for.
+
+ Raises:
+ TypeError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the expected type, and the value it got.
+ """
+ return _SubclassOfValidator(type)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _NotValidator:
+ validator = attrib()
+ msg = attrib(
+ converter=default_if_none(
+ "not_ validator child '{validator!r}' "
+ "did not raise a captured error"
+ )
+ )
+ exc_types = attrib(
+ validator=deep_iterable(
+ member_validator=_subclass_of(Exception),
+ iterable_validator=instance_of(tuple),
+ ),
+ )
+
+ def __call__(self, inst, attr, value):
+ try:
+ self.validator(inst, attr, value)
+ except self.exc_types:
+ pass # suppress error to invert validity
+ else:
+ raise ValueError(
+ self.msg.format(
+ validator=self.validator,
+ exc_types=self.exc_types,
+ ),
+ attr,
+ self.validator,
+ value,
+ self.exc_types,
+ )
+
+ def __repr__(self):
+ return f"<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>"
+
+
+def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
+ """
+ A validator that wraps and logically 'inverts' the validator passed to it.
+ It will raise a `ValueError` if the provided validator *doesn't* raise a
+ `ValueError` or `TypeError` (by default), and will suppress the exception
+ if the provided validator *does*.
+
+ Intended to be used with existing validators to compose logic without
+ needing to create inverted variants, for example, ``not_(in_(...))``.
+
+ Args:
+ validator: A validator to be logically inverted.
+
+ msg (str):
+ Message to raise if validator fails. Formatted with keys
+ ``exc_types`` and ``validator``.
+
+ exc_types (tuple[type, ...]):
+ Exception type(s) to capture. Other types raised by child
+ validators will not be intercepted and pass through.
+
+ Raises:
+ ValueError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the validator that failed to raise an
+ exception, the value it got, and the expected exception types.
+
+ .. versionadded:: 22.2.0
+ """
+ try:
+ exc_types = tuple(exc_types)
+ except TypeError:
+ exc_types = (exc_types,)
+ return _NotValidator(validator, msg, exc_types)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _OrValidator:
+ validators = attrib()
+
+ def __call__(self, inst, attr, value):
+ for v in self.validators:
+ try:
+ v(inst, attr, value)
+ except Exception: # noqa: BLE001, PERF203, S112
+ continue
+ else:
+ return
+
+ msg = f"None of {self.validators!r} satisfied for value {value!r}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f"<or validator wrapping {self.validators!r}>"
+
+
+def or_(*validators):
+ """
+ A validator that composes multiple validators into one.
+
+ When called on a value, it runs all wrapped validators until one of them is
+ satisfied.
+
+ Args:
+ validators (~collections.abc.Iterable[typing.Callable]):
+ Arbitrary number of validators.
+
+ Raises:
+ ValueError:
+ If no validator is satisfied. Raised with a human-readable error
+ message listing all the wrapped validators and the value that
+ failed all of them.
+
+ .. versionadded:: 24.1.0
+ """
+ vals = []
+ for v in validators:
+ vals.extend(v.validators if isinstance(v, _OrValidator) else [v])
+
+ return _OrValidator(tuple(vals))
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/validators.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/validators.pyi"
new file mode 100644
index 0000000..36a7e80
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attr/validators.pyi"
@@ -0,0 +1,140 @@
+from types import UnionType
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ ContextManager,
+ Iterable,
+ Mapping,
+ Match,
+ Pattern,
+ TypeVar,
+ overload,
+)
+
+from attrs import _ValidatorType
+from attrs import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_T4 = TypeVar("_T4")
+_T5 = TypeVar("_T5")
+_T6 = TypeVar("_T6")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+ type: tuple[type[_T1], type[_T2]],
+) -> _ValidatorType[_T1 | _T2]: ...
+@overload
+def instance_of(
+ type: tuple[type[_T1], type[_T2], type[_T3]],
+) -> _ValidatorType[_T1 | _T2 | _T3]: ...
+@overload
+def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
+@overload
+def instance_of(type: UnionType) -> _ValidatorType[Any]: ...
+def optional(
+ validator: (
+ _ValidatorType[_T]
+ | list[_ValidatorType[_T]]
+ | tuple[_ValidatorType[_T]]
+ ),
+) -> _ValidatorType[_T | None]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+ regex: Pattern[AnyStr] | AnyStr,
+ flags: int = ...,
+ func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+ member_validator: _ValidatorArgType[_T],
+ iterable_validator: _ValidatorArgType[_I] | None = ...,
+) -> _ValidatorType[_I]: ...
+@overload
+def deep_mapping(
+ key_validator: _ValidatorArgType[_K],
+ value_validator: _ValidatorArgType[_V] | None = ...,
+ mapping_validator: _ValidatorArgType[_M] | None = ...,
+) -> _ValidatorType[_M]: ...
+@overload
+def deep_mapping(
+ key_validator: _ValidatorArgType[_K] | None = ...,
+ value_validator: _ValidatorArgType[_V] = ...,
+ mapping_validator: _ValidatorArgType[_M] | None = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
+def not_(
+ validator: _ValidatorType[_T],
+ *,
+ msg: str | None = None,
+ exc_types: type[Exception] | Iterable[type[Exception]] = ...,
+) -> _ValidatorType[_T]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+) -> _ValidatorType[_T1 | _T2]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+) -> _ValidatorType[_T1 | _T2 | _T3]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+ __v4: _ValidatorType[_T4],
+) -> _ValidatorType[_T1 | _T2 | _T3 | _T4]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+ __v4: _ValidatorType[_T4],
+ __v5: _ValidatorType[_T5],
+) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+ __v4: _ValidatorType[_T4],
+ __v5: _ValidatorType[_T5],
+ __v6: _ValidatorType[_T6],
+) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5 | _T6]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[Any],
+ __v2: _ValidatorType[Any],
+ __v3: _ValidatorType[Any],
+ __v4: _ValidatorType[Any],
+ __v5: _ValidatorType[Any],
+ __v6: _ValidatorType[Any],
+ *validators: _ValidatorType[Any],
+) -> _ValidatorType[Any]: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/METADATA"
new file mode 100644
index 0000000..51128bb
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/METADATA"
@@ -0,0 +1,235 @@
+Metadata-Version: 2.4
+Name: attrs
+Version: 25.4.0
+Summary: Classes Without Boilerplate
+Project-URL: Documentation, https://www.attrs.org/
+Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html
+Project-URL: GitHub, https://github.com/python-attrs/attrs
+Project-URL: Funding, https://github.com/sponsors/hynek
+Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi
+Author-email: Hynek Schlawack <hs@ox.cx>
+License-Expression: MIT
+License-File: LICENSE
+Keywords: attribute,boilerplate,class
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Typing :: Typed
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+
+<p align="center">
+ <a href="https://www.attrs.org/">
+ <img src="https://raw.githubusercontent.com/python-attrs/attrs/main/docs/_static/attrs_logo.svg" width="35%" alt="attrs" />
+ </a>
+</p>
+
+
+*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)).
+Trusted by NASA for [Mars missions since 2020](https://github.com/readme/featured/nasa-ingenuity-helicopter)!
+
+Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+
+## Sponsors
+
+*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek).
+Especially those generously supporting us at the *The Organization* tier and higher:
+
+<!-- sponsor-break-begin -->
+
+<p align="center">
+
+<!-- [[[cog
+import pathlib, tomllib
+
+for sponsor in tomllib.loads(pathlib.Path("pyproject.toml").read_text())["tool"]["sponcon"]["sponsors"]:
+ print(f'<a href="{sponsor["url"]}"><img title="{sponsor["title"]}" src="https://www.attrs.org/en/25.4.0/_static/sponsors/{sponsor["img"]}" width="190" /></a>')
+]]] -->
+<a href="https://www.variomedia.de/"><img title="Variomedia AG" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Variomedia.svg" width="190" /></a>
+<a href="https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek"><img title="Tidelift" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Tidelift.svg" width="190" /></a>
+<a href="https://privacy-solutions.org/"><img title="Privacy Solutions" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Privacy-Solutions.svg" width="190" /></a>
+<a href="https://filepreviews.io/"><img title="FilePreviews" src="https://www.attrs.org/en/25.4.0/_static/sponsors/FilePreviews.svg" width="190" /></a>
+<a href="https://polar.sh/"><img title="Polar" src="https://www.attrs.org/en/25.4.0/_static/sponsors/Polar.svg" width="190" /></a>
+<!-- [[[end]]] -->
+
+</p>
+
+<!-- sponsor-break-end -->
+
+<p align="center">
+ <strong>Please consider <a href="https://github.com/sponsors/hynek">joining them</a> to help make <em>attrs</em>’s maintenance more sustainable!</strong>
+</p>
+
+<!-- teaser-end -->
+
+## Example
+
+*attrs* gives you a class decorator and a way to declaratively define the attributes on that class:
+
+<!-- code-begin -->
+
+```pycon
+>>> from attrs import asdict, define, make_class, Factory
+
+>>> @define
+... class SomeClass:
+... a_number: int = 42
+... list_of_numbers: list[int] = Factory(list)
+...
+... def hard_math(self, another_number):
+... return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+>>> sc = SomeClass(1, [1, 2, 3])
+>>> sc
+SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+>>> sc.hard_math(3)
+19
+>>> sc == SomeClass(1, [1, 2, 3])
+True
+>>> sc != SomeClass(2, [3, 2, 1])
+True
+
+>>> asdict(sc)
+{'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+>>> SomeClass()
+SomeClass(a_number=42, list_of_numbers=[])
+
+>>> C = make_class("C", ["a", "b"])
+>>> C("foo", "bar")
+C(a='foo', b='bar')
+```
+
+After *declaring* your attributes, *attrs* gives you:
+
+- a concise and explicit overview of the class's attributes,
+- a nice human-readable `__repr__`,
+- equality-checking methods,
+- an initializer,
+- and much more,
+
+*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+---
+
+This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0.
+The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**.
+
+Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation!
+
+
+### Hate Type Annotations!?
+
+No problem!
+Types are entirely **optional** with *attrs*.
+Simply assign `attrs.field()` to the attributes instead of annotating them with types:
+
+```python
+from attrs import define, field
+
+@define
+class SomeClass:
+ a_number = field(default=42)
+ list_of_numbers = field(factory=list)
+```
+
+
+## Data Classes
+
+On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*).
+In practice it does a lot more and is more flexible.
+For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger.
+
+For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice.
+
+
+## Project Information
+
+- [**Changelog**](https://www.attrs.org/en/stable/changelog.html)
+- [**Documentation**](https://www.attrs.org/)
+- [**PyPI**](https://pypi.org/project/attrs/)
+- [**Source Code**](https://github.com/python-attrs/attrs)
+- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md)
+- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs)
+- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs)
+
+
+### *attrs* for Enterprise
+
+Available as part of the [Tidelift Subscription](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek).
+
+The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
+Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
+
+## Release Information
+
+### Backwards-incompatible Changes
+
+- Class-level `kw_only=True` behavior is now consistent with `dataclasses`.
+
+ Previously, a class that sets `kw_only=True` makes all attributes keyword-only, including those from base classes.
+ If an attribute sets `kw_only=False`, that setting is ignored, and it is still made keyword-only.
+
+ Now, only the attributes defined in that class that doesn't explicitly set `kw_only=False` are made keyword-only.
+
+ This shouldn't be a problem for most users, unless you have a pattern like this:
+
+ ```python
+ @attrs.define(kw_only=True)
+ class Base:
+ a: int
+ b: int = attrs.field(default=1, kw_only=False)
+
+ @attrs.define
+ class Subclass(Base):
+ c: int
+ ```
+
+ Here, we have a `kw_only=True` *attrs* class (`Base`) with an attribute that sets `kw_only=False` and has a default (`Base.b`), and then create a subclass (`Subclass`) with required arguments (`Subclass.c`).
+ Previously this would work, since it would make `Base.b` keyword-only, but now this fails since `Base.b` is positional, and we have a required positional argument (`Subclass.c`) following another argument with defaults.
+ [#1457](https://github.com/python-attrs/attrs/issues/1457)
+
+
+### Changes
+
+- Values passed to the `__init__()` method of `attrs` classes are now correctly passed to `__attrs_pre_init__()` instead of their default values (in cases where *kw_only* was not specified).
+ [#1427](https://github.com/python-attrs/attrs/issues/1427)
+- Added support for Python 3.14 and [PEP 749](https://peps.python.org/pep-0749/).
+ [#1446](https://github.com/python-attrs/attrs/issues/1446),
+ [#1451](https://github.com/python-attrs/attrs/issues/1451)
+- `attrs.validators.deep_mapping()` now allows to leave out either *key_validator* xor *value_validator*.
+ [#1448](https://github.com/python-attrs/attrs/issues/1448)
+- `attrs.validators.deep_iterator()` and `attrs.validators.deep_mapping()` now accept lists and tuples for all validators and wrap them into a `attrs.validators.and_()`.
+ [#1449](https://github.com/python-attrs/attrs/issues/1449)
+- Added a new **experimental** way to inspect classes:
+
+ `attrs.inspect(cls)` returns the _effective_ class-wide parameters that were used by *attrs* to construct the class.
+
+ The returned class is the same data structure that *attrs* uses internally to decide how to construct the final class.
+ [#1454](https://github.com/python-attrs/attrs/issues/1454)
+- Fixed annotations for `attrs.field(converter=...)`.
+ Previously, a `tuple` of converters was only accepted if it had exactly one element.
+ [#1461](https://github.com/python-attrs/attrs/issues/1461)
+- The performance of `attrs.asdict()` has been improved by 45–260%.
+ [#1463](https://github.com/python-attrs/attrs/issues/1463)
+- The performance of `attrs.astuple()` has been improved by 49–270%.
+ [#1469](https://github.com/python-attrs/attrs/issues/1469)
+- The type annotation for `attrs.validators.or_()` now allows for different types of validators.
+
+ This was only an issue on Pyright.
+ [#1474](https://github.com/python-attrs/attrs/issues/1474)
+
+
+
+---
+
+[Full changelog →](https://www.attrs.org/en/stable/changelog.html)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/RECORD"
new file mode 100644
index 0000000..70e06bf
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/RECORD"
@@ -0,0 +1,55 @@
+attr/__init__.py,sha256=fOYIvt1eGSqQre4uCS3sJWKZ0mwAuC8UD6qba5OS9_U,2057
+attr/__init__.pyi,sha256=IZkzIjvtbRqDWGkDBIF9dd12FgDa379JYq3GHnVOvFQ,11309
+attr/__pycache__/__init__.cpython-312.pyc,,
+attr/__pycache__/_cmp.cpython-312.pyc,,
+attr/__pycache__/_compat.cpython-312.pyc,,
+attr/__pycache__/_config.cpython-312.pyc,,
+attr/__pycache__/_funcs.cpython-312.pyc,,
+attr/__pycache__/_make.cpython-312.pyc,,
+attr/__pycache__/_next_gen.cpython-312.pyc,,
+attr/__pycache__/_version_info.cpython-312.pyc,,
+attr/__pycache__/converters.cpython-312.pyc,,
+attr/__pycache__/exceptions.cpython-312.pyc,,
+attr/__pycache__/filters.cpython-312.pyc,,
+attr/__pycache__/setters.cpython-312.pyc,,
+attr/__pycache__/validators.cpython-312.pyc,,
+attr/_cmp.py,sha256=3Nn1TjxllUYiX_nJoVnEkXoDk0hM1DYKj5DE7GZe4i0,4117
+attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368
+attr/_compat.py,sha256=x0g7iEUOnBVJC72zyFCgb1eKqyxS-7f2LGnNyZ_r95s,2829
+attr/_config.py,sha256=dGq3xR6fgZEF6UBt_L0T-eUHIB4i43kRmH0P28sJVw8,843
+attr/_funcs.py,sha256=Ix5IETTfz5F01F-12MF_CSFomIn2h8b67EVVz2gCtBE,16479
+attr/_make.py,sha256=NRJDGS8syg2h3YNflVNoK2FwR3CpdSZxx8M6lacwljA,104141
+attr/_next_gen.py,sha256=BQtCUlzwg2gWHTYXBQvrEYBnzBUrDvO57u0Py6UCPhc,26274
+attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469
+attr/_version_info.py,sha256=w4R-FYC3NK_kMkGUWJlYP4cVAlH9HRaC-um3fcjYkHM,2222
+attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
+attr/converters.py,sha256=GlDeOzPeTFgeBBLbj9G57Ez5lAk68uhSALRYJ_exe84,3861
+attr/converters.pyi,sha256=orU2bff-VjQa2kMDyvnMQV73oJT2WRyQuw4ZR1ym1bE,643
+attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977
+attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539
+attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795
+attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208
+attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attr/setters.py,sha256=5-dcT63GQK35ONEzSgfXCkbB7pPkaR-qv15mm4PVSzQ,1617
+attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584
+attr/validators.py,sha256=1BnYGTuYvSucGEI4ju-RPNJteVzG0ZlfWpJiWoSFHQ8,21458
+attr/validators.pyi,sha256=ftmW3m4KJ3pQcIXAj-BejT7BY4ZfqrC1G-5W7XvoPds,4082
+attrs-25.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+attrs-25.4.0.dist-info/METADATA,sha256=2Rerxj7agcMRxiwdkt6lC2guqHAmkGKCH13nWWK7ZoQ,10473
+attrs-25.4.0.dist-info/RECORD,,
+attrs-25.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+attrs-25.4.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109
+attrs/__init__.py,sha256=RxaAZNwYiEh-fcvHLZNpQ_DWKni73M_jxEPEftiq1Zc,1183
+attrs/__init__.pyi,sha256=2gV79g9UxJppGSM48hAZJ6h_MHb70dZoJL31ZNJeZYI,9416
+attrs/__pycache__/__init__.cpython-312.pyc,,
+attrs/__pycache__/converters.cpython-312.pyc,,
+attrs/__pycache__/exceptions.cpython-312.pyc,,
+attrs/__pycache__/filters.cpython-312.pyc,,
+attrs/__pycache__/setters.cpython-312.pyc,,
+attrs/__pycache__/validators.cpython-312.pyc,,
+attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76
+attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76
+attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73
+attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73
+attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/WHEEL"
new file mode 100644
index 0000000..12228d4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/WHEEL"
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.27.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..2bd6453
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE"
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack and the attrs contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/__init__.py"
new file mode 100644
index 0000000..dc1ce4b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/__init__.py"
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+from attr import (
+ NOTHING,
+ Attribute,
+ AttrsInstance,
+ Converter,
+ Factory,
+ NothingType,
+ _make_getattr,
+ assoc,
+ cmp_using,
+ define,
+ evolve,
+ field,
+ fields,
+ fields_dict,
+ frozen,
+ has,
+ make_class,
+ mutable,
+ resolve_types,
+ validate,
+)
+from attr._make import ClassProps
+from attr._next_gen import asdict, astuple, inspect
+
+from . import converters, exceptions, filters, setters, validators
+
+
+__all__ = [
+ "NOTHING",
+ "Attribute",
+ "AttrsInstance",
+ "ClassProps",
+ "Converter",
+ "Factory",
+ "NothingType",
+ "__author__",
+ "__copyright__",
+ "__description__",
+ "__doc__",
+ "__email__",
+ "__license__",
+ "__title__",
+ "__url__",
+ "__version__",
+ "__version_info__",
+ "asdict",
+ "assoc",
+ "astuple",
+ "cmp_using",
+ "converters",
+ "define",
+ "evolve",
+ "exceptions",
+ "field",
+ "fields",
+ "fields_dict",
+ "filters",
+ "frozen",
+ "has",
+ "inspect",
+ "make_class",
+ "mutable",
+ "resolve_types",
+ "setters",
+ "validate",
+ "validators",
+]
+
+__getattr__ = _make_getattr(__name__)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/__init__.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/__init__.pyi"
new file mode 100644
index 0000000..6364bac
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/__init__.pyi"
@@ -0,0 +1,314 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Mapping,
+ Sequence,
+ overload,
+ TypeVar,
+)
+
+# Because we need to type our own stuff, we have to make everything from
+# attr explicitly public too.
+from attr import __author__ as __author__
+from attr import __copyright__ as __copyright__
+from attr import __description__ as __description__
+from attr import __email__ as __email__
+from attr import __license__ as __license__
+from attr import __title__ as __title__
+from attr import __url__ as __url__
+from attr import __version__ as __version__
+from attr import __version_info__ as __version_info__
+from attr import assoc as assoc
+from attr import Attribute as Attribute
+from attr import AttrsInstance as AttrsInstance
+from attr import cmp_using as cmp_using
+from attr import converters as converters
+from attr import Converter as Converter
+from attr import evolve as evolve
+from attr import exceptions as exceptions
+from attr import Factory as Factory
+from attr import fields as fields
+from attr import fields_dict as fields_dict
+from attr import filters as filters
+from attr import has as has
+from attr import make_class as make_class
+from attr import NOTHING as NOTHING
+from attr import resolve_types as resolve_types
+from attr import setters as setters
+from attr import validate as validate
+from attr import validators as validators
+from attr import attrib, asdict as asdict, astuple as astuple
+from attr import NothingType as NothingType
+
+if sys.version_info >= (3, 11):
+ from typing import dataclass_transform
+else:
+ from typing_extensions import dataclass_transform
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = bool | Callable[[Any], Any]
+_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]
+_CallableConverterType = Callable[[Any], Any]
+_ConverterType = _CallableConverterType | Converter[Any, Any]
+_ReprType = Callable[[Any], str]
+_ReprArgType = bool | _ReprType
+_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any]
+_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType
+_FieldTransformer = Callable[
+ [type, list["Attribute[Any]"]], list["Attribute[Any]"]
+]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]]
+
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool | None = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType, ...]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType, ...]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: _T | None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType, ...]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> Any: ...
+@overload
+@dataclass_transform(field_specifiers=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(field_specifiers=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+
+@overload
+@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
+def frozen(
+ maybe_cls: _C,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
+def frozen(
+ maybe_cls: None = ...,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+class ClassProps:
+ # XXX: somehow when defining/using enums Mypy starts looking at our own
+ # (untyped) code and causes tons of errors.
+ Hashability: Any
+ KeywordOnly: Any
+
+ is_exception: bool
+ is_slotted: bool
+ has_weakref_slot: bool
+ is_frozen: bool
+ # kw_only: ClassProps.KeywordOnly
+ kw_only: Any
+ collected_fields_by_mro: bool
+ added_init: bool
+ added_repr: bool
+ added_eq: bool
+ added_ordering: bool
+ # hashability: ClassProps.Hashability
+ hashability: Any
+ added_match_args: bool
+ added_str: bool
+ added_pickling: bool
+ on_setattr_hook: _OnSetAttrType | None
+ field_transformer: Callable[[Attribute[Any]], Attribute[Any]] | None
+
+ def __init__(
+ self,
+ is_exception: bool,
+ is_slotted: bool,
+ has_weakref_slot: bool,
+ is_frozen: bool,
+ # kw_only: ClassProps.KeywordOnly
+ kw_only: Any,
+ collected_fields_by_mro: bool,
+ added_init: bool,
+ added_repr: bool,
+ added_eq: bool,
+ added_ordering: bool,
+ # hashability: ClassProps.Hashability
+ hashability: Any,
+ added_match_args: bool,
+ added_str: bool,
+ added_pickling: bool,
+ on_setattr_hook: _OnSetAttrType,
+ field_transformer: Callable[[Attribute[Any]], Attribute[Any]],
+ ) -> None: ...
+ @property
+ def is_hashable(self) -> bool: ...
+
+def inspect(cls: type) -> ClassProps: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/converters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/converters.py"
new file mode 100644
index 0000000..7821f6c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/converters.py"
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.converters import * # noqa: F403
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/exceptions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/exceptions.py"
new file mode 100644
index 0000000..3323f9d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/exceptions.py"
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.exceptions import * # noqa: F403
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/filters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/filters.py"
new file mode 100644
index 0000000..3080f48
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/filters.py"
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.filters import * # noqa: F403
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/py.typed"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/py.typed"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/setters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/setters.py"
new file mode 100644
index 0000000..f3d73bb
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/setters.py"
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.setters import * # noqa: F403
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/validators.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/validators.py"
new file mode 100644
index 0000000..037e124
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/attrs/validators.py"
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.validators import * # noqa: F403
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/METADATA"
new file mode 100644
index 0000000..d1bc526
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/METADATA"
@@ -0,0 +1,78 @@
+Metadata-Version: 2.4
+Name: certifi
+Version: 2026.1.4
+Summary: Python package for providing Mozilla's CA Bundle.
+Home-page: https://github.com/certifi/python-certifi
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Project-URL: Source, https://github.com/certifi/python-certifi
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Requires-Python: >=3.7
+License-File: LICENSE
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: project-url
+Dynamic: requires-python
+Dynamic: summary
+
+Certifi: Python SSL Certificates
+================================
+
+Certifi provides Mozilla's carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use the
+built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
+
+Or from the command line::
+
+ $ python -m certifi
+ /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
+
+Enjoy!
+
+.. _`Requests`: https://requests.readthedocs.io/en/master/
+
+Addition/Removal of Certificates
+--------------------------------
+
+Certifi does not support any addition/removal or other modification of the
+CA trust store content. This project is intended to provide a reliable and
+highly portable root of trust to python deployments. Look to upstream projects
+for methods to use alternate trust.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/RECORD"
new file mode 100644
index 0000000..d591898
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/RECORD"
@@ -0,0 +1,14 @@
+certifi-2026.1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+certifi-2026.1.4.dist-info/METADATA,sha256=FSfJEfKuMo6bJlofUrtRpn4PFTYtbYyXpHN_A3ZFpIY,2473
+certifi-2026.1.4.dist-info/RECORD,,
+certifi-2026.1.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+certifi-2026.1.4.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
+certifi-2026.1.4.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
+certifi/__init__.py,sha256=969deMMS7Uchipr0oO4dbRBUvRi0uNYCn07VmG1aTrg,94
+certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
+certifi/__pycache__/__init__.cpython-312.pyc,,
+certifi/__pycache__/__main__.cpython-312.pyc,,
+certifi/__pycache__/core.cpython-312.pyc,,
+certifi/cacert.pem,sha256=Tzl1_zCrvzVEO0hgZK6Ly0Hf9wf_31dsdtKS-0WKoKk,270954
+certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394
+certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/WHEEL"
new file mode 100644
index 0000000..e7fa31b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..62b076c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/licenses/LICENSE"
@@ -0,0 +1,20 @@
+This package contains a modified version of ca-bundle.crt:
+
+ca-bundle.crt -- Bundle of CA Root Certificates
+
+This is a bundle of X.509 certificates of public Certificate Authorities
+(CA). These were automatically extracted from Mozilla's root certificates
+file (certdata.txt). This file can be found in the mozilla source tree:
+https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
+It contains the certificates in PEM format and therefore
+can be directly used with curl / libcurl / php_curl, or with
+an Apache+mod_ssl webserver for SSL client authentication.
+Just configure this file as the SSLCACertificateFile.#
+
+***** BEGIN LICENSE BLOCK *****
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at http://mozilla.org/MPL/2.0/.
+
+***** END LICENSE BLOCK *****
+@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/top_level.txt"
new file mode 100644
index 0000000..963eac5
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi-2026.1.4.dist-info/top_level.txt"
@@ -0,0 +1 @@
+certifi
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/__init__.py"
new file mode 100644
index 0000000..090fd58
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/__init__.py"
@@ -0,0 +1,4 @@
+from .core import contents, where
+
+__all__ = ["contents", "where"]
+__version__ = "2026.01.04"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/__main__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/__main__.py"
new file mode 100644
index 0000000..8945b5d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/__main__.py"
@@ -0,0 +1,12 @@
+import argparse
+
+from certifi import contents, where
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--contents", action="store_true")
+args = parser.parse_args()
+
+if args.contents:
+ print(contents())
+else:
+ print(where())
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/cacert.pem" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/cacert.pem"
new file mode 100644
index 0000000..132db0d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/cacert.pem"
@@ -0,0 +1,4468 @@
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft ECC Root Certificate Authority 2017"
+# Serial: 136839042543790627607696632466672567020
+# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67
+# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5
+# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02
+-----BEGIN CERTIFICATE-----
+MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw
+MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy
+b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR
+ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb
+hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3
+FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV
+L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB
+iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft RSA Root Certificate Authority 2017"
+# Serial: 40975477897264996090493496164228220339
+# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47
+# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74
+# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0
+-----BEGIN CERTIFICATE-----
+MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl
+MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw
+NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5
+IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG
+EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N
+aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ
+Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0
+ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1
+HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ
+jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc
+aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG
+YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K
+UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH
++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q
+W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC
+LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6
+tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh
+SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2
+TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3
+pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR
+xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp
+GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9
+dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN
+AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB
+RA+GsCyRxj3qrg+E
+-----END CERTIFICATE-----
+
+# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Label: "e-Szigno Root CA 2017"
+# Serial: 411379200276854331539784714
+# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98
+# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1
+# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99
+-----BEGIN CERTIFICATE-----
+MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV
+BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk
+LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv
+b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ
+BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg
+THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v
+IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv
+xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H
+Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB
+eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo
+jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ
++efcMQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Label: "certSIGN Root CA G2"
+# Serial: 313609486401300475190
+# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7
+# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32
+# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV
+BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g
+Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ
+BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ
+R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw
+vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ
+uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp
+n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs
+cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW
+xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P
+rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF
+DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx
+DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy
+LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C
+eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq
+kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC
+b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl
+qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0
+OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c
+NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk
+ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO
+pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj
+03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE
+1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX
+QRBdJ3NghVdJIgc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global Certification Authority"
+# Serial: 1846098327275375458322922162
+# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e
+# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5
+# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8
+-----BEGIN CERTIFICATE-----
+MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw
+CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x
+ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1
+c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx
+OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI
+SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI
+b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn
+swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu
+7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8
+1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW
+80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP
+JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l
+RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw
+hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10
+coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc
+BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n
+twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud
+EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud
+DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W
+0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe
+uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q
+lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE
+sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT
+MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe
+qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh
+VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8
+h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9
+EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK
+yeC2nOnOcXHebD8WpHk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P256 Certification Authority"
+# Serial: 4151900041497450638097112925
+# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54
+# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf
+# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4
+-----BEGIN CERTIFICATE-----
+MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG
+SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN
+FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w
+DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw
+CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh
+DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P384 Certification Authority"
+# Serial: 2704997926503831671788816187
+# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6
+# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2
+# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97
+-----BEGIN CERTIFICATE-----
+MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ
+j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF
+1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G
+A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3
+AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC
+MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu
+Sw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Label: "NAVER Global Root Certification Authority"
+# Serial: 9013692873798656336226253319739695165984492813
+# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b
+# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1
+# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65
+-----BEGIN CERTIFICATE-----
+MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM
+BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG
+T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx
+CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD
+b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA
+iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH
+38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE
+HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz
+kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP
+szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq
+vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf
+nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG
+YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo
+0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a
+CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K
+AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I
+36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB
+Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN
+qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj
+cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm
++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL
+hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe
+lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7
+p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8
+piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR
+LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX
+5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO
+dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul
+9XXeifdy
+-----END CERTIFICATE-----
+
+# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS"
+# Serial: 131542671362353147877283741781055151509
+# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb
+# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a
+# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb
+-----BEGIN CERTIFICATE-----
+MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw
+CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw
+FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S
+Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5
+MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL
+DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS
+QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK
+Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu
+SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC
+MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy
+v+c=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root R46"
+# Serial: 1552617688466950547958867513931858518042577
+# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef
+# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90
+# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA
+MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD
+VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy
+MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt
+c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ
+OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG
+vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud
+316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo
+0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE
+y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF
+zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE
++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN
+I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs
+x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa
+ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC
+4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4
+7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg
+JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti
+2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk
+pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF
+FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt
+rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk
+ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5
+u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP
+4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6
+N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3
+vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root E46"
+# Serial: 1552617690338932563915843282459653771421763
+# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f
+# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84
+# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58
+-----BEGIN CERTIFICATE-----
+MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx
+CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD
+ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw
+MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq
+R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd
+yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8
++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Label: "ANF Secure Server Root CA"
+# Serial: 996390341000653745
+# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96
+# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74
+# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99
+-----BEGIN CERTIFICATE-----
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV
+BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk
+YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV
+BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN
+MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF
+UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD
+VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v
+dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj
+cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q
+yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH
+2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX
+H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL
+zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR
+p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz
+W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/
+SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn
+LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3
+n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B
+u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj
+o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
+AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L
+9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej
+rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK
+pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0
+vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq
+OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ
+/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9
+2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI
++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2
+MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo
+tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum EC-384 CA"
+# Serial: 160250656287871593594747141429395092468
+# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1
+# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed
+# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6
+-----BEGIN CERTIFICATE-----
+MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw
+CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw
+JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT
+EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0
+WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT
+LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX
+BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE
+KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm
+Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8
+EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J
+UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn
+nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Root CA"
+# Serial: 40870380103424195783807378461123655149
+# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29
+# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5
+# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd
+-----BEGIN CERTIFICATE-----
+MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6
+MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu
+MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV
+BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw
+MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg
+U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo
+b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ
+n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q
+p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq
+NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF
+8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3
+HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa
+mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi
+7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF
+ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P
+qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ
+v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6
+Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1
+vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD
+ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4
+WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo
+zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR
+5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ
+GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq
+0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D
+P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM
+qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP
+0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf
+E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb
+-----END CERTIFICATE-----
+
+# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Label: "TunTrust Root CA"
+# Serial: 108534058042236574382096126452369648152337120275
+# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4
+# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb
+# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL
+BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg
+Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv
+b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG
+EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u
+IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ
+n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd
+2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF
+VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ
+GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF
+li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU
+r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2
+eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb
+MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg
+jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB
+7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW
+5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE
+ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0
+90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z
+xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu
+QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4
+FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH
+22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP
+xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn
+dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5
+Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b
+nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ
+CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH
+u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj
+d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS RSA Root CA 2021"
+# Serial: 76817823531813593706434026085292783742
+# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91
+# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d
+# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs
+MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg
+Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL
+MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl
+YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv
+b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l
+mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE
+4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv
+a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M
+pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw
+Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b
+LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY
+AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB
+AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq
+E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr
+W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ
+CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU
+X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3
+f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja
+H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP
+JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P
+zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt
+jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0
+/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT
+BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79
+aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW
+xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU
+63ZTGI0RmLo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS ECC Root CA 2021"
+# Serial: 137515985548005187474074462014555733966
+# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0
+# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48
+# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01
+-----BEGIN CERTIFICATE-----
+MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw
+CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh
+cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v
+dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG
+A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj
+aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg
+Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7
+KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y
+STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD
+AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw
+SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN
+nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 1977337328857672817
+# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3
+# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe
+# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1
+MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc
+tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd
+IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j
+b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC
+AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw
+ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m
+iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF
+Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ
+hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P
+Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE
+EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV
+1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t
+CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR
+5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw
+f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9
+ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK
+GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus ECC Root CA"
+# Serial: 630369271402956006249506845124680065938238527194
+# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85
+# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1
+# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3
+-----BEGIN CERTIFICATE-----
+MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw
+RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY
+BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz
+MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u
+LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0
+v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd
+e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw
+V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA
+AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG
+GJTO
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus Root CA"
+# Serial: 387574501246983434957692974888460947164905180485
+# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc
+# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7
+# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87
+-----BEGIN CERTIFICATE-----
+MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL
+BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x
+FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx
+MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s
+THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD
+ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc
+IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU
+AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+
+GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9
+8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH
+flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt
+J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim
+0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN
+pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ
+UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW
+OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB
+AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet
+8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd
+nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j
+bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM
+Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv
+TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS
+S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr
+I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9
+b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB
+UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P
+Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven
+sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X2 O=Internet Security Research Group
+# Subject: CN=ISRG Root X2 O=Internet Security Research Group
+# Label: "ISRG Root X2"
+# Serial: 87493402998870891108772069816698636114
+# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5
+# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af
+# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70
+-----BEGIN CERTIFICATE-----
+MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw
+CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg
+R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00
+MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT
+ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW
++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9
+ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI
+zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW
+tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1
+/q4AaOeMSQ+2b1tbFfLn
+-----END CERTIFICATE-----
+
+# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Label: "HiPKI Root CA - G1"
+# Serial: 60966262342023497858655262305426234976
+# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3
+# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60
+# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc
+-----BEGIN CERTIFICATE-----
+MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa
+Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3
+YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw
+qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv
+Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6
+lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz
+Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ
+KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK
+FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj
+HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr
+y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ
+/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM
+a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6
+fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG
+SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi
+7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc
+SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza
+ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc
+XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg
+iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho
+L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF
+Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+
+vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU
+YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 159662223612894884239637590694
+# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc
+# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28
+# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2
+-----BEGIN CERTIFICATE-----
+MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD
+VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw
+MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g
+UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT
+BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx
+uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV
+HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/
++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147
+bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 159662320309726417404178440727
+# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40
+# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a
+# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo
+27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w
+Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw
+TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl
+qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH
+szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8
+Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk
+MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92
+wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p
+aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN
+VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb
+C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe
+QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy
+h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4
+7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J
+ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef
+MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT
+6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ
+0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm
+2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb
+bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 159662449406622349769042896298
+# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc
+# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94
+# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt
+nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY
+6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu
+MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k
+RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg
+f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV
++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo
+dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW
+Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa
+G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq
+gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H
+vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8
+0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC
+B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u
+NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg
+yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev
+HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6
+xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR
+TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg
+JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV
+7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl
+6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 159662495401136852707857743206
+# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73
+# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46
+# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G
+jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2
+4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7
+VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm
+ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 159662532700760215368942768210
+# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8
+# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47
+# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi
+QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR
+HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D
+9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8
+p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Label: "Telia Root CA v2"
+# Serial: 7288924052977061235122729490515358
+# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48
+# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd
+# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx
+CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE
+AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1
+NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ
+MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq
+AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9
+vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9
+lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD
+n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT
+7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o
+6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC
+TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6
+WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R
+DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI
+pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj
+YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy
+rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ
+8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi
+0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS
+SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K
+TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF
+6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er
+3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt
+Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT
+VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW
+ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA
+rBPuUBQemMc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST BR Root CA 1 2020"
+# Serial: 165870826978392376648679885835942448534
+# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed
+# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67
+# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5
+NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS
+zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0
+QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/
+VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW
+wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV
+dWNbFJWcHwHP2NVypw87
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST EV Root CA 1 2020"
+# Serial: 126288379621884218666039612629459926992
+# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e
+# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07
+# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5
+NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC
+/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD
+wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3
+OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb
+gfM0agPnIjhQW+0ZT0MW
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS ECC P384 Root G5"
+# Serial: 13129116028163249804115411775095713523
+# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed
+# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee
+# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05
+-----BEGIN CERTIFICATE-----
+MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp
+Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2
+MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ
+bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS
+7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp
+0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS
+B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49
+BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ
+LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4
+DXZDjC5Ty3zfDBeWUA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS RSA4096 Root G5"
+# Serial: 11930366277458970227240571539258396554
+# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1
+# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35
+# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT
+HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN
+NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs
+IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+
+ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0
+2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp
+wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM
+pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD
+nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po
+sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx
+Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd
+Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX
+KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe
+XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL
+tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv
+TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN
+AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw
+GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H
+PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF
+O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ
+REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik
+AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+
+p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw
+MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF
+qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK
+ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root R1 O=Certainly
+# Subject: CN=Certainly Root R1 O=Certainly
+# Label: "Certainly Root R1"
+# Serial: 188833316161142517227353805653483829216
+# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12
+# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af
+# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw
+PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy
+dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0
+YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2
+1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT
+vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed
+aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0
+1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5
+r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5
+cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ
+wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ
+6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA
+2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH
+Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR
+eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB
+/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u
+d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr
+PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d
+8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi
+1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd
+rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di
+taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7
+lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj
+yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn
+Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy
+yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n
+wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6
+OV+KmalBWQewLK8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root E1 O=Certainly
+# Subject: CN=Certainly Root E1 O=Certainly
+# Label: "Certainly Root E1"
+# Serial: 8168531406727139161245376702891150584
+# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9
+# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b
+# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2
+-----BEGIN CERTIFICATE-----
+MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw
+CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu
+bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ
+BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s
+eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2
+QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4
+hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm
+ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
+BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication ECC RootCA1"
+# Serial: 15446673492073852651
+# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86
+# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41
+# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11
+-----BEGIN CERTIFICATE-----
+MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT
+AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD
+VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx
+NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT
+HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5
+IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl
+dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK
+ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu
+9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O
+be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA1"
+# Serial: 113562791157148395269083148143378328608
+# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90
+# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a
+# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU
+MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI
+T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz
+MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF
+SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh
+bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z
+xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ
+spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5
+58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR
+at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll
+5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq
+nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK
+V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/
+pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO
+z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn
+jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+
+WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF
+7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli
+awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u
++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88
+X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN
+SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo
+P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI
++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz
+znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2
+YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy
+r/6zcCwupvI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA2"
+# Serial: 58605626836079930195615843123109055211
+# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c
+# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6
+# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82
+-----BEGIN CERTIFICATE-----
+MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw
+CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ
+VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy
+MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ
+TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS
+b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B
+IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+
++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK
+sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA
+94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B
+43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root E46"
+# Serial: 88989738453351742415770396670917916916
+# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01
+# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a
+# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw
+CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T
+ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN
+MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG
+A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT
+ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC
+WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+
+6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B
+Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa
+qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q
+4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root R46"
+# Serial: 156256931880233212765902055439220583700
+# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5
+# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38
+# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06
+-----BEGIN CERTIFICATE-----
+MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD
+Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw
+HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY
+MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp
+YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa
+ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz
+SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf
+iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X
+ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3
+IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS
+VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE
+SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu
++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt
+8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L
+HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt
+zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P
+AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c
+mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ
+YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52
+gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA
+Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB
+JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX
+DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui
+TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5
+dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65
+LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp
+0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY
+QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS RSA Root CA 2022"
+# Serial: 148535279242832292258835760425842727825
+# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da
+# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca
+# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed
+-----BEGIN CERTIFICATE-----
+MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO
+MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD
+DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX
+DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw
+b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC
+AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP
+L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY
+t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins
+S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO
+L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3
+R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w
+dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS
++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS
+d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG
+AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f
+gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z
+NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt
+hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM
+QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf
+R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ
+DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW
+P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy
+lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq
+bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w
+AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q
+r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji
+Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU
+98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS ECC Root CA 2022"
+# Serial: 26605119622390491762507526719404364228
+# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5
+# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39
+# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT
+U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2
+MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh
+dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm
+acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN
+SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME
+GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW
+uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp
+15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN
+b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA ECC TLS 2021"
+# Serial: 81873346711060652204712539181482831616
+# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8
+# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd
+# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8
+-----BEGIN CERTIFICATE-----
+MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w
+LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w
+CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0
+MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI
+zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X
+tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4
+AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2
+KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD
+aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu
+CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo
+9H1/IISpQuQo
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA RSA TLS 2021"
+# Serial: 111436099570196163832749341232207667876
+# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2
+# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48
+# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f
+-----BEGIN CERTIFICATE-----
+MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM
+MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx
+MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00
+MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD
+QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z
+4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv
+Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ
+kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs
+GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln
+nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh
+3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD
+0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy
+geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8
+ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB
+c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI
+pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS
+4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs
+o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ
+qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw
+xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM
+rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4
+AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR
+0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY
+o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
+dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
+oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G3"
+# Serial: 576386314500428537169965010905813481816650257167
+# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04
+# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7
+# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08
+-----BEGIN CERTIFICATE-----
+MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM
+BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp
+ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe
+Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw
+IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU
+cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC
+DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS
+T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK
+AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1
+nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep
+qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA
+yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs
+hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX
+zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv
+kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT
+f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA
+uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB
+o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih
+MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E
+BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4
+wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2
+XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1
+JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j
+ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV
+VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx
+xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on
+AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d
+7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj
+gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV
++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo
+FGWsJwt0ivKH
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G4"
+# Serial: 451799571007117016466790293371524403291602933463
+# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb
+# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a
+# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c
+-----BEGIN CERTIFICATE-----
+MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw
+WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs
+IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y
+MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD
+VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz
+dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx
+s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw
+LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD
+pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE
+AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR
+UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj
+/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS ECC Root 2020"
+# Serial: 72082518505882327255703894282316633856
+# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
+# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
+# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
+-----BEGIN CERTIFICATE-----
+MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw
+CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH
+bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw
+MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx
+JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE
+AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O
+tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP
+f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
+MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di
+z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn
+27iQ7t0l
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS RSA Root 2023"
+# Serial: 44676229530606711399881795178081572759
+# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
+# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
+# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj
+MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0
+eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy
+MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC
+REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9
+cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV
+cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA
+U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6
+Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug
+BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy
+8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J
+co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg
+8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8
+rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12
+mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg
++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX
+gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2
+p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ
+pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm
+9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw
+M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd
+GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+
+CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t
+xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+
+w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK
+L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj
+X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
+ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
+dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
+# Serial: 65916896770016886708751106294915943533
+# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
+# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
+# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
+-----BEGIN CERTIFICATE-----
+MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf
+e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C
+cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O
+BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
+PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
+XSaQpYXFuXqUPoeovQA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA CYBER Root CA"
+# Serial: 85076849864375384482682434040119489222
+# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
+# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
+# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
+MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
+IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
+WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
+LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
+Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
+40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
+avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
+34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
+JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
+j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
+Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
+2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
+S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
+oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
+kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
+5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
+BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
+AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
+tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
+68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
+TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
+RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
+f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
+Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
+8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
+NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
+xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
+t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA12"
+# Serial: 587887345431707215246142177076162061960426065942
+# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
+# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
+# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
+NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
+KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
+p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
+J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
+FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
+hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
+h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
+AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
+mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
+mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
+8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
+55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
+yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA14"
+# Serial: 575790784512929437950770173562378038616896959179
+# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
+# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
+# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
+NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
+FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
+vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
+6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
+/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
+kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
+0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
+y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
+18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
+0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
+SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
+ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
+86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
+rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
+ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
+DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
+2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
+FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
+K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
+dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
+Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
+365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
+JRNItX+S
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA15"
+# Serial: 126083514594751269499665114766174399806381178503
+# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
+# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
+# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
+-----BEGIN CERTIFICATE-----
+MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
+UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
+dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
+NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
+cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
+IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
+wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
+ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
+9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
+4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
+bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH
+# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH
+# Label: "D-TRUST BR Root CA 2 2023"
+# Serial: 153168538924886464690566649552453098598
+# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85
+# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87
+# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1
+-----BEGIN CERTIFICATE-----
+MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI
+MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE
+LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw
+OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi
+MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr
+i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE
+gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8
+k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT
+Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl
+2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U
+cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP
+/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS
+uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+
+0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N
+DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+
+XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61
+GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG
+OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y
+XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI
+FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n
+riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR
+VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc
+LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn
+4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD
+hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG
+koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46
+ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS
+Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80
+knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ
+hJ65bvspmZDogNOfJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia TLS ECC Root CA"
+# Serial: 310892014698942880364840003424242768478804666567
+# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c
+# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36
+# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11
+-----BEGIN CERTIFICATE-----
+MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw
+WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs
+IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw
+NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE
+ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB
+c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/
+AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp
+guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw
+DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01
+L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR
+OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia TLS RSA Root CA"
+# Serial: 160405846464868906657516898462547310235378010780
+# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12
+# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa
+# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3
+-----BEGIN CERTIFICATE-----
+MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM
+BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp
+ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN
+MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG
+A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1
+c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC
+AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+
+NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ
+Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561
+HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32
+ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb
+xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX
+i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ
+UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j
+TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT
+bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8
+S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3
+Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4
+iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt
+7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp
+2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ
+g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj
+pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M
+pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP
+XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe
+SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0
+ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy
+323imttUQ/hHWKNddBWcwauwxzQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH
+# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH
+# Label: "D-TRUST EV Root CA 2 2023"
+# Serial: 139766439402180512324132425437959641711
+# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6
+# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b
+# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce
+-----BEGIN CERTIFICATE-----
+MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI
+MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE
+LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw
+OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi
+MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK
+F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE
+7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe
+EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6
+lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb
+RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV
+jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc
+jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx
+TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+
+ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk
+hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF
+NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH
+kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG
+OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y
+XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14
+QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4
+pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q
+3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU
+t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX
+cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8
+ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT
+2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs
+7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP
+gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst
+Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh
+XBxvWHZks/wCuPWdCg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG
+# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG
+# Label: "SwissSign RSA TLS Root CA 2022 - 1"
+# Serial: 388078645722908516278762308316089881486363258315
+# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39
+# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce
+# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41
+-----BEGIN CERTIFICATE-----
+MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE
+AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx
+MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT
+d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg
+MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX
+vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7
+LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX
+5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE
+EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt
+/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x
+0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5
+KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM
+0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd
+OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta
+clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK
+wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD
+AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4
+DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL
+BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3
+10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz
+Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ
+iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc
+gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM
+ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF
+LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp
+zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td
+Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0
+rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO
+gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE Server Root ECC G1 O=OISTE Foundation
+# Subject: CN=OISTE Server Root ECC G1 O=OISTE Foundation
+# Label: "OISTE Server Root ECC G1"
+# Serial: 47819833811561661340092227008453318557
+# MD5 Fingerprint: 42:a7:d2:35:ae:02:92:db:19:76:08:de:2f:05:b4:d4
+# SHA1 Fingerprint: 3b:f6:8b:09:ae:2a:92:7b:ba:e3:8d:3f:11:95:d9:e6:44:0c:45:e2
+# SHA256 Fingerprint: ee:c9:97:c0:c3:0f:21:6f:7e:3b:8b:30:7d:2b:ae:42:41:2d:75:3f:c8:21:9d:af:d1:52:0b:25:72:85:0f:49
+-----BEGIN CERTIFICATE-----
+MIICNTCCAbqgAwIBAgIQI/nD1jWvjyhLH/BU6n6XnTAKBggqhkjOPQQDAzBLMQsw
+CQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UEAwwY
+T0lTVEUgU2VydmVyIFJvb3QgRUNDIEcxMB4XDTIzMDUzMTE0NDIyOFoXDTQ4MDUy
+NDE0NDIyN1owSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5kYXRp
+b24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IEVDQyBHMTB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABBcv+hK8rBjzCvRE1nZCnrPoH7d5qVi2+GXROiFPqOujvqQy
+cvO2Ackr/XeFblPdreqqLiWStukhEaivtUwL85Zgmjvn6hp4LrQ95SjeHIC6XG4N
+2xml4z+cKrhAS93mT6NjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBQ3
+TYhlz/w9itWj8UnATgwQb0K0nDAdBgNVHQ4EFgQUN02IZc/8PYrVo/FJwE4MEG9C
+tJwwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2kAMGYCMQCpKjAd0MKfkFFR
+QD6VVCHNFmb3U2wIFjnQEnx/Yxvf4zgAOdktUyBFCxxgZzFDJe0CMQCSia7pXGKD
+YmH5LVerVrkR3SW+ak5KGoJr3M/TvEqzPNcum9v4KGm8ay3sMaE641c=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE Server Root RSA G1 O=OISTE Foundation
+# Subject: CN=OISTE Server Root RSA G1 O=OISTE Foundation
+# Label: " OISTE Server Root RSA G1"
+# Serial: 113845518112613905024960613408179309848
+# MD5 Fingerprint: 23:a7:9e:d4:70:b8:b9:14:57:41:8a:7e:44:59:e2:68
+# SHA1 Fingerprint: f7:00:34:25:94:88:68:31:e4:34:87:3f:70:fe:86:b3:86:9f:f0:6e
+# SHA256 Fingerprint: 9a:e3:62:32:a5:18:9f:fd:db:35:3d:fd:26:52:0c:01:53:95:d2:27:77:da:c5:9d:b5:7b:98:c0:89:a6:51:e6
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIQVaXZZ5Qoxu0M+ifdWwFNGDANBgkqhkiG9w0BAQwFADBL
+MQswCQYDVQQGEwJDSDEZMBcGA1UECgwQT0lTVEUgRm91bmRhdGlvbjEhMB8GA1UE
+AwwYT0lTVEUgU2VydmVyIFJvb3QgUlNBIEcxMB4XDTIzMDUzMTE0MzcxNloXDTQ4
+MDUyNDE0MzcxNVowSzELMAkGA1UEBhMCQ0gxGTAXBgNVBAoMEE9JU1RFIEZvdW5k
+YXRpb24xITAfBgNVBAMMGE9JU1RFIFNlcnZlciBSb290IFJTQSBHMTCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAKqu9KuCz/vlNwvn1ZatkOhLKdxVYOPM
+vLO8LZK55KN68YG0nnJyQ98/qwsmtO57Gmn7KNByXEptaZnwYx4M0rH/1ow00O7b
+rEi56rAUjtgHqSSY3ekJvqgiG1k50SeH3BzN+Puz6+mTeO0Pzjd8JnduodgsIUzk
+ik/HEzxux9UTl7Ko2yRpg1bTacuCErudG/L4NPKYKyqOBGf244ehHa1uzjZ0Dl4z
+O8vbUZeUapU8zhhabkvG/AePLhq5SvdkNCncpo1Q4Y2LS+VIG24ugBA/5J8bZT8R
+tOpXaZ+0AOuFJJkk9SGdl6r7NH8CaxWQrbueWhl/pIzY+m0o/DjH40ytas7ZTpOS
+jswMZ78LS5bOZmdTaMsXEY5Z96ycG7mOaES3GK/m5Q9l3JUJsJMStR8+lKXHiHUh
+sd4JJCpM4rzsTGdHwimIuQq6+cF0zowYJmXa92/GjHtoXAvuY8BeS/FOzJ8vD+Ho
+mnqT8eDI278n5mUpezbgMxVz8p1rhAhoKzYHKyfMeNhqhw5HdPSqoBNdZH702xSu
++zrkL8Fl47l6QGzwBrd7KJvX4V84c5Ss2XCTLdyEr0YconosP4EmQufU2MVshGYR
+i3drVByjtdgQ8K4p92cIiBdcuJd5z+orKu5YM+Vt6SmqZQENghPsJQtdLEByFSnT
+kCz3GkPVavBpAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU
+8snBDw1jALvsRQ5KH7WxszbNDo0wHQYDVR0OBBYEFPLJwQ8NYwC77EUOSh+1sbM2
+zQ6NMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQwFAAOCAgEANGd5sjrG5T33
+I3K5Ce+SrScfoE4KsvXaFwyihdJ+klH9FWXXXGtkFu6KRcoMQzZENdl//nk6HOjG
+5D1rd9QhEOP28yBOqb6J8xycqd+8MDoX0TJD0KqKchxRKEzdNsjkLWd9kYccnbz8
+qyiWXmFcuCIzGEgWUOrKL+mlSdx/PKQZvDatkuK59EvV6wit53j+F8Bdh3foZ3dP
+AGav9LEDOr4SfEE15fSmG0eLy3n31r8Xbk5l8PjaV8GUgeV6Vg27Rn9vkf195hfk
+gSe7BYhW3SCl95gtkRlpMV+bMPKZrXJAlszYd2abtNUOshD+FKrDgHGdPY3ofRRs
+YWSGRqbXVMW215AWRqWFyp464+YTFrYVI8ypKVL9AMb2kI5Wj4kI3Zaq5tNqqYY1
+9tVFeEJKRvwDyF7YZvZFZSS0vod7VSCd9521Kvy5YhnLbDuv0204bKt7ph6N/Ome
+/msVuduCmsuY33OhkKCgxeDoAaijFJzIwZqsFVAzje18KotzlUBDJvyBpCpfOZC3
+J8tRd/iWkx7P8nd9H0aTolkelUTFLXVksNb54Dxp6gS1HAviRkRNQzuXSXERvSS2
+wq1yVAb+axj5d9spLFKebXd7Yv0PTY6YMjAwcRLWJTXjn/hvnLXrahut6hDTlhZy
+BiElxky8j3C7DOReIoMt0r7+hVu05L0=
+-----END CERTIFICATE-----
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/core.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/core.py"
new file mode 100644
index 0000000..1c9661c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/core.py"
@@ -0,0 +1,83 @@
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem or its contents.
+"""
+import sys
+import atexit
+
+def exit_cacert_ctx() -> None:
+ _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
+
+
+if sys.version_info >= (3, 11):
+
+ from importlib.resources import as_file, files
+
+ _CACERT_CTX = None
+ _CACERT_PATH = None
+
+ def where() -> str:
+ # This is slightly terrible, but we want to delay extracting the file
+ # in cases where we're inside of a zipimport situation until someone
+ # actually calls where(), but we don't want to re-extract the file
+ # on every call of where(), so we'll do it once then store it in a
+ # global variable.
+ global _CACERT_CTX
+ global _CACERT_PATH
+ if _CACERT_PATH is None:
+ # This is slightly janky, the importlib.resources API wants you to
+ # manage the cleanup of this file, so it doesn't actually return a
+ # path, it returns a context manager that will give you the path
+ # when you enter it and will do any cleanup when you leave it. In
+ # the common case of not needing a temporary file, it will just
+ # return the file system location and the __exit__() is a no-op.
+ #
+ # We also have to hold onto the actual context manager, because
+ # it will do the cleanup whenever it gets garbage collected, so
+ # we will also store that at the global level as well.
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
+ atexit.register(exit_cacert_ctx)
+
+ return _CACERT_PATH
+
+ def contents() -> str:
+ return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
+
+else:
+
+ from importlib.resources import path as get_path, read_text
+
+ _CACERT_CTX = None
+ _CACERT_PATH = None
+
+ def where() -> str:
+ # This is slightly terrible, but we want to delay extracting the
+ # file in cases where we're inside of a zipimport situation until
+ # someone actually calls where(), but we don't want to re-extract
+ # the file on every call of where(), so we'll do it once then store
+ # it in a global variable.
+ global _CACERT_CTX
+ global _CACERT_PATH
+ if _CACERT_PATH is None:
+ # This is slightly janky, the importlib.resources API wants you
+ # to manage the cleanup of this file, so it doesn't actually
+ # return a path, it returns a context manager that will give
+ # you the path when you enter it and will do any cleanup when
+ # you leave it. In the common case of not needing a temporary
+ # file, it will just return the file system location and the
+ # __exit__() is a no-op.
+ #
+ # We also have to hold onto the actual context manager, because
+ # it will do the cleanup whenever it gets garbage collected, so
+ # we will also store that at the global level as well.
+ _CACERT_CTX = get_path("certifi", "cacert.pem")
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
+ atexit.register(exit_cacert_ctx)
+
+ return _CACERT_PATH
+
+ def contents() -> str:
+ return read_text("certifi", "cacert.pem", encoding="ascii")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/py.typed"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/certifi/py.typed"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/METADATA"
new file mode 100644
index 0000000..8d32edc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/METADATA"
@@ -0,0 +1,764 @@
+Metadata-Version: 2.4
+Name: charset-normalizer
+Version: 3.4.4
+Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
+Author-email: "Ahmed R. TAHRI" <tahri.ahmed@proton.me>
+Maintainer-email: "Ahmed R. TAHRI" <tahri.ahmed@proton.me>
+License: MIT
+Project-URL: Changelog, https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md
+Project-URL: Documentation, https://charset-normalizer.readthedocs.io/
+Project-URL: Code, https://github.com/jawah/charset_normalizer
+Project-URL: Issue tracker, https://github.com/jawah/charset_normalizer/issues
+Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Text Processing :: Linguistic
+Classifier: Topic :: Utilities
+Classifier: Typing :: Typed
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: unicode-backport
+Dynamic: license-file
+
+<h1 align="center">Charset Detection, for Everyone 👋</h1>
+
+<p align="center">
+ <sup>The Real First Universal Charset Detector</sup><br>
+ <a href="https://pypi.org/project/charset-normalizer">
+ <img src="https://img.shields.io/pypi/pyversions/charset_normalizer.svg?orange=blue" />
+ </a>
+ <a href="https://pepy.tech/project/charset-normalizer/">
+ <img alt="Download Count Total" src="https://static.pepy.tech/badge/charset-normalizer/month" />
+ </a>
+ <a href="https://bestpractices.coreinfrastructure.org/projects/7297">
+ <img src="https://bestpractices.coreinfrastructure.org/projects/7297/badge">
+ </a>
+</p>
+<p align="center">
+ <sup><i>Featured Packages</i></sup><br>
+ <a href="https://github.com/jawah/niquests">
+ <img alt="Static Badge" src="https://img.shields.io/badge/Niquests-Most_Advanced_HTTP_Client-cyan">
+ </a>
+ <a href="https://github.com/jawah/wassima">
+ <img alt="Static Badge" src="https://img.shields.io/badge/Wassima-Certifi_Replacement-cyan">
+ </a>
+</p>
+<p align="center">
+ <sup><i>In other language (unofficial port - by the community)</i></sup><br>
+ <a href="https://github.com/nickspring/charset-normalizer-rs">
+ <img alt="Static Badge" src="https://img.shields.io/badge/Rust-red">
+ </a>
+</p>
+
+> A library that helps you read text from an unknown charset encoding.<br /> Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+<p align="center">
+ >>>>> <a href="https://charsetnormalizerweb.ousret.now.sh" target="_blank">👉 Try Me Online Now, Then Adopt Me 👈 </a> <<<<<
+</p>
+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1<br>_restrictive_ | MIT | MPL-1.1<br>_restrictive_ |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
+| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
+
+<p align="center">
+<img src="https://i.imgflip.com/373iay.gif" alt="Reading Normalized Text" width="226"/><img src="https://media.tenor.com/images/c0180f70732a18b4965448d33adba3d0/tenor.gif" alt="Cat Reading Text" width="200"/>
+</p>
+
+*\*\* : They are clearly using specific code for a specific encoding even if covering most of used one*<br>
+
+## ⚡ Performance
+
+This package offer better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+|-----------------------------------------------|:--------:|:------------------:|:------------------:|
+| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |
+| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
+| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |
+| charset-normalizer | 100 ms | 50 ms | 5 ms |
+
+_updated as of december 2024 using CPython 3.12_
+
+Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.
+
+> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.
+> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability
+> (e.g. Supported Encoding) Challenge-them if you want.
+
+## ✨ Installation
+
+Using pip:
+
+```sh
+pip install charset-normalizer -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+ file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+ files File(s) to be analysed
+
+optional arguments:
+ -h, --help show this help message and exit
+ -v, --verbose Display complementary information about file if any.
+ Stdout will contain logs about the detection process.
+ -a, --with-alternative
+ Output complementary possibilities if any. Top-level
+ JSON WILL be a list.
+ -n, --normalize Permit to normalize input file. If not set, program
+ does not write anything.
+ -m, --minimal Only output the charset detected to STDOUT. Disabling
+ JSON output.
+ -r, --replace Replace file when trying to normalize it instead of
+ creating a new one.
+ -f, --force Replace file without asking if you are sure, use this
+ flag with caution.
+ -t THRESHOLD, --threshold THRESHOLD
+ Define a custom maximum amount of chaos allowed in
+ decoded content. 0. <= chaos <= 1.
+ --version Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+or
+
+```bash
+python -m charset_normalizer ./data/sample.1.fr.srt
+```
+
+🎉 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.
+
+```json
+{
+ "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+ "encoding": "cp1252",
+ "encoding_aliases": [
+ "1252",
+ "windows_1252"
+ ],
+ "alternative_encodings": [
+ "cp1254",
+ "cp1256",
+ "cp1258",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ "mbcs"
+ ],
+ "language": "French",
+ "alphabets": [
+ "Basic Latin",
+ "Latin-1 Supplement"
+ ],
+ "has_sig_or_bom": false,
+ "chaos": 0.149,
+ "coherence": 97.152,
+ "unicode_path": null,
+ "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
+
+See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also! I never back down on a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered string.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute forcing text decoding.** How cool is that ? 😎
+
+Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding table that could not fit the binary content.
+ - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.
+ - Extract matches with the lowest mess detected.
+ - Additionally, we measure coherence / probe for a language.
+
+**Wait a minute**, what is noise/mess and coherence according to **YOU ?**
+
+*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).
+ I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to
+ improve or rewrite it.
+
+*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought
+that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.
+
+## ⚡ Known limitations
+
+ - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content.
+
+## ⚠️ About Python EOLs
+
+**If you are running:**
+
+- Python >=2.7,<3.5: Unsupported
+- Python 3.5: charset-normalizer < 2.1
+- Python 3.6: charset-normalizer < 3.1
+- Python 3.7: charset-normalizer < 4.0
+
+Upgrade your Python interpreter as soon as possible.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.<br />
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
+
+## 📝 License
+
+Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).<br />
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
+
+Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
+
+## 💼 For Enterprise
+
+Professional support for charset-normalizer is available as part of the [Tidelift
+Subscription][1]. Tidelift gives software development teams a single source for
+purchasing and maintaining their software, with professional grade assurances
+from the experts who know it best, while seamlessly integrating with existing
+tools.
+
+[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
+
+[](https://www.bestpractices.dev/projects/7297)
+
+# Changelog
+All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [3.4.4](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.4) (2025-10-13)
+
+### Changed
+- Bound `setuptools` to a specific constraint `setuptools>=68,<=81`.
+- Raised upper bound of mypyc for the optional pre-built extension to v1.18.2
+
+### Removed
+- `setuptools-scm` as a build dependency.
+
+### Misc
+- Enforced hashes in `dev-requirements.txt` and created `ci-requirements.txt` for security purposes.
+- Additional pre-built wheels for riscv64, s390x, and armv7l architectures.
+- Restore ` multiple.intoto.jsonl` in GitHub releases in addition to individual attestation file per wheel.
+
+## [3.4.3](https://github.com/Ousret/charset_normalizer/compare/3.4.2...3.4.3) (2025-08-09)
+
+### Changed
+- mypy(c) is no longer a required dependency at build time if `CHARSET_NORMALIZER_USE_MYPYC` isn't set to `1`. (#595) (#583)
+- automatically lower confidence on small bytes samples that are not Unicode in `detect` output legacy function. (#391)
+
+### Added
+- Custom build backend to overcome inability to mark mypy as an optional dependency in the build phase.
+- Support for Python 3.14
+
+### Fixed
+- sdist archive contained useless directories.
+- automatically fallback on valid UTF-16 or UTF-32 even if the md says it's noisy. (#633)
+
+### Misc
+- SBOM are automatically published to the relevant GitHub release to comply with regulatory changes.
+ Each published wheel comes with its SBOM. We choose CycloneDX as the format.
+- Prebuilt optimized wheel are no longer distributed by default for CPython 3.7 due to a change in cibuildwheel.
+
+## [3.4.2](https://github.com/Ousret/charset_normalizer/compare/3.4.1...3.4.2) (2025-05-02)
+
+### Fixed
+- Addressed the DeprecationWarning in our CLI regarding `argparse.FileType` by backporting the target class into the package. (#591)
+- Improved the overall reliability of the detector with CJK Ideographs. (#605) (#587)
+
+### Changed
+- Optional mypyc compilation upgraded to version 1.15 for Python >= 3.8
+
+## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)
+
+### Changed
+- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.
+- Enforce annotation delayed loading for a simpler and consistent types in the project.
+- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8
+
+### Added
+- pre-commit configuration.
+- noxfile.
+
+### Removed
+- `build-requirements.txt` as per using `pyproject.toml` native build configuration.
+- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).
+- `setup.cfg` in favor of `pyproject.toml` metadata configuration.
+- Unused `utils.range_scan` function.
+
+### Fixed
+- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572)
+- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+
+
+## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)
+
+### Added
+- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.
+- Support for Python 3.13 (#512)
+
+### Fixed
+- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.
+- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)
+- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)
+
+## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)
+
+### Fixed
+- Unintentional memory usage regression when using large payload that match several encoding (#376)
+- Regression on some detection case showcased in the documentation (#371)
+
+### Added
+- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)
+
+## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)
+
+### Changed
+- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8
+- Improved the general detection reliability based on reports from the community
+
+## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)
+
+### Added
+- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`
+- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)
+
+### Removed
+- (internal) Redundant utils.is_ascii function and unused function is_private_use_only
+- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant
+
+### Changed
+- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection
+- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8
+
+### Fixed
+- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350)
+
+## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)
+
+### Changed
+- Typehint for function `from_path` no longer enforce `PathLike` as its first argument
+- Minor improvement over the global detection reliability
+
+### Added
+- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries
+- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)
+- Explicit support for Python 3.12
+
+### Fixed
+- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)
+
+## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
+
+### Added
+- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)
+
+### Removed
+- Support for Python 3.6 (PR #260)
+
+### Changed
+- Optional speedup provided by mypy/c 1.0.1
+
+## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
+
+### Fixed
+- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)
+
+### Changed
+- Speedup provided by mypy/c 0.990 on Python >= 3.7
+
+## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
+- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
+
+### Changed
+- Build with static metadata using 'build' frontend
+- Make the language detection stricter
+- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
+
+### Fixed
+- CLI with opt --normalize fail when using full path for files
+- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
+- Sphinx warnings when generating the documentation
+
+### Removed
+- Coherence detector no longer return 'Simple English' instead return 'English'
+- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
+- Breaking: Method `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
+- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+
+### Changed
+- Build with static metadata using 'build' frontend
+- Make the language detection stricter
+
+### Fixed
+- CLI with opt --normalize fail when using full path for files
+- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
+
+### Removed
+- Coherence detector no longer return 'Simple English' instead return 'English'
+- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
+
+## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
+
+### Added
+- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
+
+### Removed
+- Breaking: Method `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
+
+### Fixed
+- Sphinx warnings when generating the documentation
+
+## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
+
+### Changed
+- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
+
+### Removed
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
+
+### Deprecated
+- Function `normalize` scheduled for removal in 3.0
+
+### Changed
+- Removed useless call to decode in fn is_unprintable (#206)
+
+### Fixed
+- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
+
+## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
+
+### Added
+- Output the Unicode table version when running the CLI with `--version` (PR #194)
+
+### Changed
+- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
+- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
+
+### Fixed
+- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)
+- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
+
+### Removed
+- Support for Python 3.5 (PR #192)
+
+### Deprecated
+- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
+
+## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
+
+### Fixed
+- ASCII miss-detection on rare cases (PR #170)
+
+## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
+
+### Added
+- Explicit support for Python 3.11 (PR #164)
+
+### Changed
+- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
+
+## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
+
+### Fixed
+- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)
+
+### Changed
+- Skipping the language-detection (CD) on ASCII (PR #155)
+
+## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
+
+### Changed
+- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
+
+### Fixed
+- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
+
+## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
+### Changed
+- Improvement over Vietnamese detection (PR #126)
+- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)
+- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
+- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)
+- Code style as refactored by Sourcery-AI (PR #131)
+- Minor adjustment on the MD around european words (PR #133)
+- Remove and replace SRTs from assets / tests (PR #139)
+- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
+
+### Fixed
+- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
+- Avoid using too insignificant chunk (PR #137)
+
+### Added
+- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
+
+## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
+### Added
+- Add support for Kazakh (Cyrillic) language detection (PR #109)
+
+### Changed
+- Further, improve inferring the language from a given single-byte code page (PR #112)
+- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
+- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
+- Various detection improvement (MD+CD) (PR #117)
+
+### Removed
+- Remove redundant logging entry about detected language(s) (PR #115)
+
+### Fixed
+- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
+
+## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
+### Fixed
+- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)
+- Fix CLI crash when using --minimal output in certain cases (PR #103)
+
+### Changed
+- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
+
+## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
+### Changed
+- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)
+- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)
+- The Unicode detection is slightly improved (PR #93)
+- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91)
+
+### Removed
+- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)
+
+### Fixed
+- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)
+- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
+- The MANIFEST.in was not exhaustive (PR #78)
+
+## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
+### Fixed
+- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)
+- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
+- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
+- Submatch factoring could be wrong in rare edge cases (PR #72)
+- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)
+- Fix line endings from CRLF to LF for certain project files (PR #67)
+
+### Changed
+- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
+- Allow fallback on specified encoding if any (PR #71)
+
+## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
+### Changed
+- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)
+- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
+
+## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
+### Fixed
+- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
+
+### Changed
+- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
+
+## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
+### Fixed
+- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
+- Using explain=False permanently disable the verbose output in the current runtime (PR #47)
+- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)
+- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
+
+### Changed
+- Public function normalize default args values were not aligned with from_bytes (PR #53)
+
+### Added
+- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
+
+## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
+### Changed
+- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
+- Accent has been made on UTF-8 detection, should perform rather instantaneous.
+- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
+- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)
+- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+
+- utf_7 detection has been reinstated.
+
+### Removed
+- This package no longer require anything when used with Python 3.5 (Dropped cached_property)
+- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.
+- The exception hook on UnicodeDecodeError has been removed.
+
+### Deprecated
+- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
+
+### Fixed
+- The CLI output used the relative path of the file(s). Should be absolute.
+
+## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
+### Fixed
+- Logger configuration/usage no longer conflict with others (PR #44)
+
+## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
+### Removed
+- Using standard logging instead of using the package loguru.
+- Dropping nose test framework in favor of the maintained pytest.
+- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.
+- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.
+- Stop support for UTF-7 that does not contain a SIG.
+- Dropping PrettyTable, replaced with pure JSON output in CLI.
+
+### Fixed
+- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.
+- Not searching properly for the BOM when trying utf32/16 parent codec.
+
+### Changed
+- Improving the package final size by compressing frequencies.json.
+- Huge improvement over the larges payload.
+
+### Added
+- CLI now produces JSON consumable output.
+- Return ASCII if given sequences fit. Given reasonable confidence.
+
+## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
+
+### Fixed
+- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
+
+## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
+
+### Fixed
+- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)
+
+## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
+
+### Fixed
+- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)
+
+## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
+
+### Changed
+- Amend the previous release to allow prettytable 2.0 (PR #35)
+
+## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
+
+### Fixed
+- Fix error while using the package with a python pre-release interpreter (PR #33)
+
+### Changed
+- Dependencies refactoring, constraints revised.
+
+### Added
+- Add python 3.9 and 3.10 to the supported interpreters
+
+MIT License
+
+Copyright (c) 2025 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/RECORD"
new file mode 100644
index 0000000..16ff4aa
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/RECORD"
@@ -0,0 +1,35 @@
+../../Scripts/normalizer.exe,sha256=RLFKRlToWJbG2cIuc8LwiH9p57bvbjtx4XeLs3JgbRY,108422
+charset_normalizer-3.4.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+charset_normalizer-3.4.4.dist-info/METADATA,sha256=Mg5oc0yfpVMtDcprHt_pPbbV0qUSHEeaEz4NG53pmyY,38067
+charset_normalizer-3.4.4.dist-info/RECORD,,
+charset_normalizer-3.4.4.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101
+charset_normalizer-3.4.4.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65
+charset_normalizer-3.4.4.dist-info/licenses/LICENSE,sha256=GFd0hdNwTxpHne2OVzwJds_tMV_S_ReYP6mI2kwvcNE,1092
+charset_normalizer-3.4.4.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
+charset_normalizer/__init__.py,sha256=0NT8MHi7SKq3juMqYfOdrkzjisK0L73lneNHH4qaUAs,1638
+charset_normalizer/__main__.py,sha256=2sj_BS6H0sU25C1bMqz9DVwa6kOK9lchSEbSU-_iu7M,115
+charset_normalizer/__pycache__/__init__.cpython-312.pyc,,
+charset_normalizer/__pycache__/__main__.cpython-312.pyc,,
+charset_normalizer/__pycache__/api.cpython-312.pyc,,
+charset_normalizer/__pycache__/cd.cpython-312.pyc,,
+charset_normalizer/__pycache__/constant.cpython-312.pyc,,
+charset_normalizer/__pycache__/legacy.cpython-312.pyc,,
+charset_normalizer/__pycache__/md.cpython-312.pyc,,
+charset_normalizer/__pycache__/models.cpython-312.pyc,,
+charset_normalizer/__pycache__/utils.cpython-312.pyc,,
+charset_normalizer/__pycache__/version.cpython-312.pyc,,
+charset_normalizer/api.py,sha256=ODy4hX78b3ldTl5sViYPU1yzQ5qkclfgSIFE8BtNrTI,23337
+charset_normalizer/cd.py,sha256=uq8nVxRpR6Guc16ACvOWtL8KO3w7vYaCh8hHisuOyTg,12917
+charset_normalizer/cli/__init__.py,sha256=d9MUx-1V_qD3x9igIy4JT4oC5CU0yjulk7QyZWeRFhg,144
+charset_normalizer/cli/__main__.py,sha256=-pdJCyPywouPyFsC8_eTSgTmvh1YEvgjsvy1WZ0XjaA,13027
+charset_normalizer/cli/__pycache__/__init__.cpython-312.pyc,,
+charset_normalizer/cli/__pycache__/__main__.cpython-312.pyc,,
+charset_normalizer/constant.py,sha256=mCJmYzpBU27Ut9kiNWWoBbhhxQ-aRVw3K7LSwoFwBGI,44728
+charset_normalizer/legacy.py,sha256=ui08NlKqAXU3Y7smK-NFJjEgRRQz9ruM7aNCbT0OOrE,2811
+charset_normalizer/md.cp312-win_amd64.pyd,sha256=dqU14JU7SKI0i4dyNqV5nPHQHLIUIsfxeULzU2fLXI8,10752
+charset_normalizer/md.py,sha256=LSuW2hNgXSgF7JGdRapLAHLuj6pABHiP85LTNAYmu7c,20780
+charset_normalizer/md__mypyc.cp312-win_amd64.pyd,sha256=CDDD_25vg5Sn3xcPlfwQ3mWrnyKzD50jg_DMKZuN8QE,126976
+charset_normalizer/models.py,sha256=ZR2PE-fqf6dASZfqdE5Uhkmr0o1MciSdXOjuNqwkmvg,12754
+charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/utils.py,sha256=XtWIQeOuz7cnGebMzyi4Vvi1JtA84QBSIeR9PDzF7pw,12584
+charset_normalizer/version.py,sha256=MhW8dOLls4GbbxBUqeS1huc7Rth1ArKi4nS90qTFwz8,123
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/WHEEL"
new file mode 100644
index 0000000..10ac2c2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/entry_points.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/entry_points.txt"
new file mode 100644
index 0000000..65619e7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/entry_points.txt"
@@ -0,0 +1,2 @@
+[console_scripts]
+normalizer = charset_normalizer.cli:cli_detect
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..9725772
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/licenses/LICENSE"
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/top_level.txt"
new file mode 100644
index 0000000..66958f0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer-3.4.4.dist-info/top_level.txt"
@@ -0,0 +1 @@
+charset_normalizer
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/__init__.py"
new file mode 100644
index 0000000..0d3a379
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/__init__.py"
@@ -0,0 +1,48 @@
+"""
+Charset-Normalizer
+~~~~~~~~~~~~~~
+The Real First Universal Charset Detector.
+A library that helps you read text from an unknown charset encoding.
+Motivated by chardet, This package is trying to resolve the issue by taking a new approach.
+All IANA character set names for which the Python core library provides codecs are supported.
+
+Basic usage:
+ >>> from charset_normalizer import from_bytes
+ >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
+ >>> best_guess = results.best()
+ >>> str(best_guess)
+ 'Bсеки човек има право на образование. Oбразованието!'
+
+Others methods and usages are available - see the full documentation
+at <https://github.com/Ousret/charset_normalizer>.
+:copyright: (c) 2021 by Ahmed TAHRI
+:license: MIT, see LICENSE for more details.
+"""
+
+from __future__ import annotations
+
+import logging
+
+from .api import from_bytes, from_fp, from_path, is_binary
+from .legacy import detect
+from .models import CharsetMatch, CharsetMatches
+from .utils import set_logging_handler
+from .version import VERSION, __version__
+
+__all__ = (
+ "from_fp",
+ "from_path",
+ "from_bytes",
+ "is_binary",
+ "detect",
+ "CharsetMatch",
+ "CharsetMatches",
+ "__version__",
+ "VERSION",
+ "set_logging_handler",
+)
+
+# Attach a NullHandler to the top level logger by default
+# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
+
+logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/__main__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/__main__.py"
new file mode 100644
index 0000000..e0e76f7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/__main__.py"
@@ -0,0 +1,6 @@
+from __future__ import annotations
+
+from .cli import cli_detect
+
+if __name__ == "__main__":
+ cli_detect()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/api.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/api.py"
new file mode 100644
index 0000000..ebd9639
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/api.py"
@@ -0,0 +1,669 @@
+from __future__ import annotations
+
+import logging
+from os import PathLike
+from typing import BinaryIO
+
+from .cd import (
+ coherence_ratio,
+ encoding_languages,
+ mb_encoding_languages,
+ merge_coherence_ratios,
+)
+from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
+from .md import mess_ratio
+from .models import CharsetMatch, CharsetMatches
+from .utils import (
+ any_specified_encoding,
+ cut_sequence_chunks,
+ iana_name,
+ identify_sig_or_bom,
+ is_cp_similar,
+ is_multi_byte_encoding,
+ should_strip_sig_or_bom,
+)
+
+logger = logging.getLogger("charset_normalizer")
+explain_handler = logging.StreamHandler()
+explain_handler.setFormatter(
+ logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
+)
+
+
+def from_bytes(
+ sequences: bytes | bytearray,
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.2,
+ cp_isolation: list[str] | None = None,
+ cp_exclusion: list[str] | None = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+ Given a raw bytes sequence, return the best possibles charset usable to render str objects.
+ If there is no results, it is a strong indicator that the source is binary/not text.
+ By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence.
+ And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will.
+
+ The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page
+ but never take it for granted. Can improve the performance.
+
+ You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that
+ purpose.
+
+ This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32.
+ By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain'
+ toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging.
+ Custom logging format and handler can be set manually.
+ """
+
+ if not isinstance(sequences, (bytearray, bytes)):
+ raise TypeError(
+ "Expected object of type bytes or bytearray, got: {}".format(
+ type(sequences)
+ )
+ )
+
+ if explain:
+ previous_logger_level: int = logger.level
+ logger.addHandler(explain_handler)
+ logger.setLevel(TRACE)
+
+ length: int = len(sequences)
+
+ if length == 0:
+ logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
+ if explain: # Defensive: ensure exit path clean handler
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level or logging.WARNING)
+ return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])
+
+ if cp_isolation is not None:
+ logger.log(
+ TRACE,
+ "cp_isolation is set. use this flag for debugging purpose. "
+ "limited list of encoding allowed : %s.",
+ ", ".join(cp_isolation),
+ )
+ cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
+ else:
+ cp_isolation = []
+
+ if cp_exclusion is not None:
+ logger.log(
+ TRACE,
+ "cp_exclusion is set. use this flag for debugging purpose. "
+ "limited list of encoding excluded : %s.",
+ ", ".join(cp_exclusion),
+ )
+ cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
+ else:
+ cp_exclusion = []
+
+ if length <= (chunk_size * steps):
+ logger.log(
+ TRACE,
+ "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
+ steps,
+ chunk_size,
+ length,
+ )
+ steps = 1
+ chunk_size = length
+
+ if steps > 1 and length / steps < chunk_size:
+ chunk_size = int(length / steps)
+
+ is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE
+ is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE
+
+ if is_too_small_sequence:
+ logger.log(
+ TRACE,
+ "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
+ length
+ ),
+ )
+ elif is_too_large_sequence:
+ logger.log(
+ TRACE,
+ "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
+ length
+ ),
+ )
+
+ prioritized_encodings: list[str] = []
+
+ specified_encoding: str | None = (
+ any_specified_encoding(sequences) if preemptive_behaviour else None
+ )
+
+ if specified_encoding is not None:
+ prioritized_encodings.append(specified_encoding)
+ logger.log(
+ TRACE,
+ "Detected declarative mark in sequence. Priority +1 given for %s.",
+ specified_encoding,
+ )
+
+ tested: set[str] = set()
+ tested_but_hard_failure: list[str] = []
+ tested_but_soft_failure: list[str] = []
+
+ fallback_ascii: CharsetMatch | None = None
+ fallback_u8: CharsetMatch | None = None
+ fallback_specified: CharsetMatch | None = None
+
+ results: CharsetMatches = CharsetMatches()
+
+ early_stop_results: CharsetMatches = CharsetMatches()
+
+ sig_encoding, sig_payload = identify_sig_or_bom(sequences)
+
+ if sig_encoding is not None:
+ prioritized_encodings.append(sig_encoding)
+ logger.log(
+ TRACE,
+ "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
+ len(sig_payload),
+ sig_encoding,
+ )
+
+ prioritized_encodings.append("ascii")
+
+ if "utf_8" not in prioritized_encodings:
+ prioritized_encodings.append("utf_8")
+
+ for encoding_iana in prioritized_encodings + IANA_SUPPORTED:
+ if cp_isolation and encoding_iana not in cp_isolation:
+ continue
+
+ if cp_exclusion and encoding_iana in cp_exclusion:
+ continue
+
+ if encoding_iana in tested:
+ continue
+
+ tested.add(encoding_iana)
+
+ decoded_payload: str | None = None
+ bom_or_sig_available: bool = sig_encoding == encoding_iana
+ strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom(
+ encoding_iana
+ )
+
+ if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
+ logger.log(
+ TRACE,
+ "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
+ encoding_iana,
+ )
+ continue
+ if encoding_iana in {"utf_7"} and not bom_or_sig_available:
+ logger.log(
+ TRACE,
+ "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.",
+ encoding_iana,
+ )
+ continue
+
+ try:
+ is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana)
+ except (ModuleNotFoundError, ImportError):
+ logger.log(
+ TRACE,
+ "Encoding %s does not provide an IncrementalDecoder",
+ encoding_iana,
+ )
+ continue
+
+ try:
+ if is_too_large_sequence and is_multi_byte_decoder is False:
+ str(
+ (
+ sequences[: int(50e4)]
+ if strip_sig_or_bom is False
+ else sequences[len(sig_payload) : int(50e4)]
+ ),
+ encoding=encoding_iana,
+ )
+ else:
+ decoded_payload = str(
+ (
+ sequences
+ if strip_sig_or_bom is False
+ else sequences[len(sig_payload) :]
+ ),
+ encoding=encoding_iana,
+ )
+ except (UnicodeDecodeError, LookupError) as e:
+ if not isinstance(e, LookupError):
+ logger.log(
+ TRACE,
+ "Code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ tested_but_hard_failure.append(encoding_iana)
+ continue
+
+ similar_soft_failure_test: bool = False
+
+ for encoding_soft_failed in tested_but_soft_failure:
+ if is_cp_similar(encoding_iana, encoding_soft_failed):
+ similar_soft_failure_test = True
+ break
+
+ if similar_soft_failure_test:
+ logger.log(
+ TRACE,
+ "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
+ encoding_iana,
+ encoding_soft_failed,
+ )
+ continue
+
+ r_ = range(
+ 0 if not bom_or_sig_available else len(sig_payload),
+ length,
+ int(length / steps),
+ )
+
+ multi_byte_bonus: bool = (
+ is_multi_byte_decoder
+ and decoded_payload is not None
+ and len(decoded_payload) < length
+ )
+
+ if multi_byte_bonus:
+ logger.log(
+ TRACE,
+ "Code page %s is a multi byte encoding table and it appear that at least one character "
+ "was encoded using n-bytes.",
+ encoding_iana,
+ )
+
+ max_chunk_gave_up: int = int(len(r_) / 4)
+
+ max_chunk_gave_up = max(max_chunk_gave_up, 2)
+ early_stop_count: int = 0
+ lazy_str_hard_failure = False
+
+ md_chunks: list[str] = []
+ md_ratios = []
+
+ try:
+ for chunk in cut_sequence_chunks(
+ sequences,
+ encoding_iana,
+ r_,
+ chunk_size,
+ bom_or_sig_available,
+ strip_sig_or_bom,
+ sig_payload,
+ is_multi_byte_decoder,
+ decoded_payload,
+ ):
+ md_chunks.append(chunk)
+
+ md_ratios.append(
+ mess_ratio(
+ chunk,
+ threshold,
+ explain is True and 1 <= len(cp_isolation) <= 2,
+ )
+ )
+
+ if md_ratios[-1] >= threshold:
+ early_stop_count += 1
+
+ if (early_stop_count >= max_chunk_gave_up) or (
+ bom_or_sig_available and strip_sig_or_bom is False
+ ):
+ break
+ except (
+ UnicodeDecodeError
+ ) as e: # Lazy str loading may have missed something there
+ logger.log(
+ TRACE,
+ "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ early_stop_count = max_chunk_gave_up
+ lazy_str_hard_failure = True
+
+ # We might want to check the sequence again with the whole content
+ # Only if initial MD tests passes
+ if (
+ not lazy_str_hard_failure
+ and is_too_large_sequence
+ and not is_multi_byte_decoder
+ ):
+ try:
+ sequences[int(50e3) :].decode(encoding_iana, errors="strict")
+ except UnicodeDecodeError as e:
+ logger.log(
+ TRACE,
+ "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ tested_but_hard_failure.append(encoding_iana)
+ continue
+
+ mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0
+ if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
+ tested_but_soft_failure.append(encoding_iana)
+ logger.log(
+ TRACE,
+ "%s was excluded because of initial chaos probing. Gave up %i time(s). "
+ "Computed mean chaos is %f %%.",
+ encoding_iana,
+ early_stop_count,
+ round(mean_mess_ratio * 100, ndigits=3),
+ )
+ # Preparing those fallbacks in case we got nothing.
+ if (
+ enable_fallback
+ and encoding_iana
+ in ["ascii", "utf_8", specified_encoding, "utf_16", "utf_32"]
+ and not lazy_str_hard_failure
+ ):
+ fallback_entry = CharsetMatch(
+ sequences,
+ encoding_iana,
+ threshold,
+ bom_or_sig_available,
+ [],
+ decoded_payload,
+ preemptive_declaration=specified_encoding,
+ )
+ if encoding_iana == specified_encoding:
+ fallback_specified = fallback_entry
+ elif encoding_iana == "ascii":
+ fallback_ascii = fallback_entry
+ else:
+ fallback_u8 = fallback_entry
+ continue
+
+ logger.log(
+ TRACE,
+ "%s passed initial chaos probing. Mean measured chaos is %f %%",
+ encoding_iana,
+ round(mean_mess_ratio * 100, ndigits=3),
+ )
+
+ if not is_multi_byte_decoder:
+ target_languages: list[str] = encoding_languages(encoding_iana)
+ else:
+ target_languages = mb_encoding_languages(encoding_iana)
+
+ if target_languages:
+ logger.log(
+ TRACE,
+ "{} should target any language(s) of {}".format(
+ encoding_iana, str(target_languages)
+ ),
+ )
+
+ cd_ratios = []
+
+ # We shall skip the CD when its about ASCII
+ # Most of the time its not relevant to run "language-detection" on it.
+ if encoding_iana != "ascii":
+ for chunk in md_chunks:
+ chunk_languages = coherence_ratio(
+ chunk,
+ language_threshold,
+ ",".join(target_languages) if target_languages else None,
+ )
+
+ cd_ratios.append(chunk_languages)
+
+ cd_ratios_merged = merge_coherence_ratios(cd_ratios)
+
+ if cd_ratios_merged:
+ logger.log(
+ TRACE,
+ "We detected language {} using {}".format(
+ cd_ratios_merged, encoding_iana
+ ),
+ )
+
+ current_match = CharsetMatch(
+ sequences,
+ encoding_iana,
+ mean_mess_ratio,
+ bom_or_sig_available,
+ cd_ratios_merged,
+ (
+ decoded_payload
+ if (
+ is_too_large_sequence is False
+ or encoding_iana in [specified_encoding, "ascii", "utf_8"]
+ )
+ else None
+ ),
+ preemptive_declaration=specified_encoding,
+ )
+
+ results.append(current_match)
+
+ if (
+ encoding_iana in [specified_encoding, "ascii", "utf_8"]
+ and mean_mess_ratio < 0.1
+ ):
+ # If md says nothing to worry about, then... stop immediately!
+ if mean_mess_ratio == 0.0:
+ logger.debug(
+ "Encoding detection: %s is most likely the one.",
+ current_match.encoding,
+ )
+ if explain: # Defensive: ensure exit path clean handler
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+ return CharsetMatches([current_match])
+
+ early_stop_results.append(current_match)
+
+ if (
+ len(early_stop_results)
+ and (specified_encoding is None or specified_encoding in tested)
+ and "ascii" in tested
+ and "utf_8" in tested
+ ):
+ probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment]
+ logger.debug(
+ "Encoding detection: %s is most likely the one.",
+ probable_result.encoding,
+ )
+ if explain: # Defensive: ensure exit path clean handler
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+
+ return CharsetMatches([probable_result])
+
+ if encoding_iana == sig_encoding:
+ logger.debug(
+ "Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
+ "the beginning of the sequence.",
+ encoding_iana,
+ )
+ if explain: # Defensive: ensure exit path clean handler
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+ return CharsetMatches([results[encoding_iana]])
+
+ if len(results) == 0:
+ if fallback_u8 or fallback_ascii or fallback_specified:
+ logger.log(
+ TRACE,
+ "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
+ )
+
+ if fallback_specified:
+ logger.debug(
+ "Encoding detection: %s will be used as a fallback match",
+ fallback_specified.encoding,
+ )
+ results.append(fallback_specified)
+ elif (
+ (fallback_u8 and fallback_ascii is None)
+ or (
+ fallback_u8
+ and fallback_ascii
+ and fallback_u8.fingerprint != fallback_ascii.fingerprint
+ )
+ or (fallback_u8 is not None)
+ ):
+ logger.debug("Encoding detection: utf_8 will be used as a fallback match")
+ results.append(fallback_u8)
+ elif fallback_ascii:
+ logger.debug("Encoding detection: ascii will be used as a fallback match")
+ results.append(fallback_ascii)
+
+ if results:
+ logger.debug(
+ "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
+ results.best().encoding, # type: ignore
+ len(results) - 1,
+ )
+ else:
+ logger.debug("Encoding detection: Unable to determine any suitable charset.")
+
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+
+ return results
+
+
+def from_fp(
+ fp: BinaryIO,
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: list[str] | None = None,
+ cp_exclusion: list[str] | None = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+ Same thing than the function from_bytes but using a file pointer that is already ready.
+ Will not close the file pointer.
+ """
+ return from_bytes(
+ fp.read(),
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ enable_fallback,
+ )
+
+
+def from_path(
+ path: str | bytes | PathLike, # type: ignore[type-arg]
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: list[str] | None = None,
+ cp_exclusion: list[str] | None = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+ Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode.
+ Can raise IOError.
+ """
+ with open(path, "rb") as fp:
+ return from_fp(
+ fp,
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ enable_fallback,
+ )
+
+
+def is_binary(
+ fp_or_path_or_payload: PathLike | str | BinaryIO | bytes, # type: ignore[type-arg]
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: list[str] | None = None,
+ cp_exclusion: list[str] | None = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = False,
+) -> bool:
+ """
+ Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string.
+ Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match
+ are disabled to be stricter around ASCII-compatible but unlikely to be a string.
+ """
+ if isinstance(fp_or_path_or_payload, (str, PathLike)):
+ guesses = from_path(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+ elif isinstance(
+ fp_or_path_or_payload,
+ (
+ bytes,
+ bytearray,
+ ),
+ ):
+ guesses = from_bytes(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+ else:
+ guesses = from_fp(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+
+ return not guesses
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cd.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cd.py"
new file mode 100644
index 0000000..71a3ed5
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cd.py"
@@ -0,0 +1,395 @@
+from __future__ import annotations
+
+import importlib
+from codecs import IncrementalDecoder
+from collections import Counter
+from functools import lru_cache
+from typing import Counter as TypeCounter
+
+from .constant import (
+ FREQUENCIES,
+ KO_NAMES,
+ LANGUAGE_SUPPORTED_COUNT,
+ TOO_SMALL_SEQUENCE,
+ ZH_NAMES,
+)
+from .md import is_suspiciously_successive_range
+from .models import CoherenceMatches
+from .utils import (
+ is_accentuated,
+ is_latin,
+ is_multi_byte_encoding,
+ is_unicode_range_secondary,
+ unicode_range,
+)
+
+
def encoding_unicode_range(iana_name: str) -> list[str]:
    """
    Return associated unicode ranges in a single byte code page.
    """
    if is_multi_byte_encoding(iana_name):
        raise OSError("Function not supported on multi-byte code page")

    decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder

    incremental_decoder: IncrementalDecoder = decoder(errors="ignore")
    range_occurrences: dict[str, int] = {}
    total_decoded: int = 0

    # Probe every byte of the upper single-byte space (0x40-0xFE), one at a time.
    for code_point in range(0x40, 0xFF):
        symbol: str = incremental_decoder.decode(bytes([code_point]))

        if not symbol:
            continue

        detected_range: str | None = unicode_range(symbol)

        # Unknown ranges and secondary ranges do not contribute to the verdict.
        if detected_range is None or is_unicode_range_secondary(detected_range):
            continue

        range_occurrences[detected_range] = range_occurrences.get(detected_range, 0) + 1
        total_decoded += 1

    # Keep only ranges covering at least 15% of the decodable characters.
    return sorted(
        range_name
        for range_name, count in range_occurrences.items()
        if count / total_decoded >= 0.15
    )
+
+
def unicode_range_languages(primary_range: str) -> list[str]:
    """
    Return inferred languages used with a unicode range.
    """
    # A language qualifies as soon as one of its frequent characters
    # belongs to the requested unicode range.
    return [
        language
        for language, characters in FREQUENCIES.items()
        if any(unicode_range(character) == primary_range for character in characters)
    ]
+
+
@lru_cache()
def encoding_languages(iana_name: str) -> list[str]:
    """
    Single-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    # The first non-Latin range drives the inference; a code page made of
    # nothing but Latin ranges yields the generic "Latin Based" answer.
    primary_range: str | None = next(
        (
            candidate
            for candidate in encoding_unicode_range(iana_name)
            if "Latin" not in candidate
        ),
        None,
    )

    if primary_range is None:
        return ["Latin Based"]

    return unicode_range_languages(primary_range)
+
+
+@lru_cache()
+def mb_encoding_languages(iana_name: str) -> list[str]:
+ """
+ Multi-byte encoding language association. Some code page are heavily linked to particular language(s).
+ This function does the correspondence.
+ """
+ if (
+ iana_name.startswith("shift_")
+ or iana_name.startswith("iso2022_jp")
+ or iana_name.startswith("euc_j")
+ or iana_name == "cp932"
+ ):
+ return ["Japanese"]
+ if iana_name.startswith("gb") or iana_name in ZH_NAMES:
+ return ["Chinese"]
+ if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
+ return ["Korean"]
+
+ return []
+
+
@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
def get_target_features(language: str) -> tuple[bool, bool]:
    """
    Determine main aspects from a supported language if it contains accents and if is pure Latin.
    """
    characters = FREQUENCIES[language]

    # (has at least one accentuated char, is composed of Latin chars only)
    have_accents: bool = any(is_accentuated(character) for character in characters)
    pure_latin: bool = all(is_latin(character) for character in characters)

    return have_accents, pure_latin
+
+
def alphabet_languages(
    characters: list[str], ignore_non_latin: bool = False
) -> list[str]:
    """
    Return associated languages associated to given characters.
    """
    candidates: list[tuple[str, float]] = []

    source_have_accents = any(is_accentuated(character) for character in characters)

    for language, language_characters in FREQUENCIES.items():
        target_have_accents, target_pure_latin = get_target_features(language)

        # Discard languages that cannot plausibly match the observed alphabet.
        if ignore_non_latin and not target_pure_latin:
            continue
        if not target_have_accents and source_have_accents:
            continue

        shared_count = sum(
            1 for candidate in language_characters if candidate in characters
        )
        ratio = shared_count / len(language_characters)

        # 20% overlap with the language frequency table is the admission bar.
        if ratio >= 0.2:
            candidates.append((language, ratio))

    candidates.sort(key=lambda entry: entry[1], reverse=True)

    return [language for language, _ in candidates]
+
+
def characters_popularity_compare(
    language: str, ordered_characters: list[str]
) -> float:
    """
    Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language.
    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
    Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.)

    :raises ValueError: when *language* is not in the FREQUENCIES table.
    """
    if language not in FREQUENCIES:
        raise ValueError(f"{language} not available")

    character_approved_count: int = 0
    FREQUENCIES_language_set = set(FREQUENCIES[language])

    ordered_characters_count: int = len(ordered_characters)
    target_language_characters_count: int = len(FREQUENCIES[language])

    # Alphabets larger than the Latin 26 letters get a looser rank tolerance below.
    large_alphabet: bool = target_language_characters_count > 26

    for character, character_rank in zip(
        ordered_characters, range(0, ordered_characters_count)
    ):
        if character not in FREQUENCIES_language_set:
            continue

        character_rank_in_language: int = FREQUENCIES[language].index(character)
        # Project the observed rank onto the language table's scale so the two
        # ranks stay comparable even when the lists have different lengths.
        expected_projection_ratio: float = (
            target_language_characters_count / ordered_characters_count
        )
        character_rank_projection: int = int(character_rank * expected_projection_ratio)

        # Small alphabet: reject when the projected rank is more than 4
        # positions away from the language's own rank for this character.
        if (
            large_alphabet is False
            and abs(character_rank_projection - character_rank_in_language) > 4
        ):
            continue

        # Large alphabet: approve directly when ranks land within a third of
        # the table; otherwise fall through to the neighborhood comparison.
        if (
            large_alphabet is True
            and abs(character_rank_projection - character_rank_in_language)
            < target_language_characters_count / 3
        ):
            character_approved_count += 1
            continue

        # Neighborhood check: which characters rank before/after this one in
        # the observed ordering vs. in the language frequency table.
        characters_before_source: list[str] = FREQUENCIES[language][
            0:character_rank_in_language
        ]
        characters_after_source: list[str] = FREQUENCIES[language][
            character_rank_in_language:
        ]
        characters_before: list[str] = ordered_characters[0:character_rank]
        characters_after: list[str] = ordered_characters[character_rank:]

        before_match_count: int = len(
            set(characters_before) & set(characters_before_source)
        )

        after_match_count: int = len(
            set(characters_after) & set(characters_after_source)
        )

        # Edge of the table on either side: approve when the observed
        # neighborhood on that side is (near) empty as well.
        if len(characters_before_source) == 0 and before_match_count <= 4:
            character_approved_count += 1
            continue

        if len(characters_after_source) == 0 and after_match_count <= 4:
            character_approved_count += 1
            continue

        # Approve when at least 40% of either neighborhood overlaps.
        if (
            before_match_count / len(characters_before_source) >= 0.4
            or after_match_count / len(characters_after_source) >= 0.4
        ):
            character_approved_count += 1
            continue

    return character_approved_count / len(ordered_characters)
+
+
def alpha_unicode_split(decoded_sequence: str) -> list[str]:
    """
    Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
    Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list;
    One containing the latin letters and the other hebrew.
    """
    layers: dict[str, str] = {}

    for character in decoded_sequence:
        if not character.isalpha():
            continue

        character_range: str | None = unicode_range(character)

        if character_range is None:
            continue

        # Reuse an existing layer when the two ranges are commonly seen
        # together; otherwise a fresh layer keyed by this range is opened.
        target_range: str = next(
            (
                known_range
                for known_range in layers
                if is_suspiciously_successive_range(known_range, character_range)
                is False
            ),
            character_range,
        )

        layers[target_range] = layers.get(target_range, "") + character.lower()

    return list(layers.values())
+
+
def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches:
    """
    This function merge results previously given by the function coherence_ratio.
    The return type is the same as coherence_ratio.
    """
    ratios_by_language: dict[str, list[float]] = {}

    # Group every observed ratio under its language.
    for batch in results:
        for language, ratio in batch:
            ratios_by_language.setdefault(language, []).append(ratio)

    # Average each language's ratios (4-digit precision), best score first.
    merged = [
        (language, round(sum(ratios) / len(ratios), 4))
        for language, ratios in ratios_by_language.items()
    ]

    merged.sort(key=lambda entry: entry[1], reverse=True)
    return merged
+
+
def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches:
    """
    We shall NOT return "English—" in CoherenceMatches because it is an alternative
    of "English". This function only keeps the best match and remove the em-dash in it.
    """
    grouped: dict[str, list[float]] = {}

    # Group ratios under the language name stripped of any em-dash suffix.
    for language, ratio in results:
        grouped.setdefault(language.replace("—", ""), []).append(ratio)

    # No alternative spotted anywhere: hand back the input untouched.
    if all(len(ratios) == 1 for ratios in grouped.values()):
        return results

    # Otherwise keep the best ratio per (normalized) language.
    return [(language, max(ratios)) for language, ratios in grouped.items()]
+
+
@lru_cache(maxsize=2048)
def coherence_ratio(
    decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None
) -> CoherenceMatches:
    """
    Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers.
    A layer = Character extraction by alphabets/ranges.
    """
    results: list[tuple[str, float]] = []
    sufficient_match_count: int = 0

    # Optional comma-separated language restriction; the pseudo language
    # "Latin Based" only toggles the non-Latin filter, it is not a candidate.
    included_languages: list[str] = (
        lg_inclusion.split(",") if lg_inclusion is not None else []
    )
    ignore_non_latin: bool = "Latin Based" in included_languages
    if ignore_non_latin:
        included_languages.remove("Latin Based")

    for layer in alpha_unicode_split(decoded_sequence):
        occurrences: TypeCounter[str] = Counter(layer)
        most_common = occurrences.most_common()

        character_count: int = sum(count for _, count in most_common)

        # Tiny layers carry no statistical signal; skip them.
        if character_count <= TOO_SMALL_SEQUENCE:
            continue

        popular_character_ordered: list[str] = [c for c, _ in most_common]

        for language in included_languages or alphabet_languages(
            popular_character_ordered, ignore_non_latin
        ):
            ratio: float = characters_popularity_compare(
                language, popular_character_ordered
            )

            if ratio < threshold:
                continue
            if ratio >= 0.8:
                sufficient_match_count += 1

            results.append((language, round(ratio, 4)))

            # Three strong hits are enough evidence for this layer.
            if sufficient_match_count >= 3:
                break

    return sorted(
        filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True
    )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__init__.py"
new file mode 100644
index 0000000..543a5a4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__init__.py"
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from .__main__ import cli_detect, query_yes_no
+
# Explicit public surface of the CLI sub-package.
__all__ = (
    "cli_detect",
    "query_yes_no",
)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__main__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__main__.py"
new file mode 100644
index 0000000..cb64156
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/cli/__main__.py"
@@ -0,0 +1,381 @@
+from __future__ import annotations
+
+import argparse
+import sys
+import typing
+from json import dumps
+from os.path import abspath, basename, dirname, join, realpath
+from platform import python_version
+from unicodedata import unidata_version
+
+import charset_normalizer.md as md_module
+from charset_normalizer import from_fp
+from charset_normalizer.models import CliDetectionResult
+from charset_normalizer.version import __version__
+
+
def query_yes_no(question: str, default: str | None = "yes") -> bool:
    """Ask a yes/no question via input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be "yes" (the default), "no" or None (meaning
    an answer is required of the user).

    The "answer" return value is True for "yes" or False for "no".

    :raises ValueError: when "default" is not "yes", "no" or None.

    Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
    """
    # Recognized spellings mapped onto booleans ("ye" tolerated as partial "yes").
    valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)

    # Re-prompt until the user supplies a recognized answer (or takes the default).
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == "":
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
+
+
class FileType:
    """Argparse ``type=`` factory that opens file objects.

    Instances of FileType are typically passed as type= arguments to the
    ArgumentParser add_argument() method. The pseudo file name ``-`` maps to
    stdin for read modes and stdout for write modes; anything else is handed
    to the builtin ``open()``.

    Keyword Arguments:
        - mode -- how the file is to be opened; same values as ``open()``.
        - bufsize -- desired buffer size; same values as ``open()``.
        - encoding -- the file's encoding; same values as ``open()``.
        - errors -- encode/decode error policy; same values as ``open()``.

    Backported from CPython 3.12
    """

    def __init__(
        self,
        mode: str = "r",
        bufsize: int = -1,
        encoding: str | None = None,
        errors: str | None = None,
    ):
        self._mode = mode
        self._bufsize = bufsize
        self._encoding = encoding
        self._errors = errors

    def __call__(self, string: str) -> typing.IO:  # type: ignore[type-arg]
        # "-" is the conventional alias for the standard streams.
        if string == "-":
            if "r" in self._mode:
                return sys.stdin.buffer if "b" in self._mode else sys.stdin
            if any(flag in self._mode for flag in "wax"):
                return sys.stdout.buffer if "b" in self._mode else sys.stdout
            raise ValueError(f'argument "-" with mode {self._mode}')

        # All other arguments are treated as file names.
        try:
            return open(string, self._mode, self._bufsize, self._encoding, self._errors)
        except OSError as e:
            raise argparse.ArgumentTypeError(f"can't open '{string}': {e}")

    def __repr__(self) -> str:
        positional = [repr(arg) for arg in (self._mode, self._bufsize) if arg != -1]
        keyword = [
            f"{name}={value!r}"
            for name, value in (("encoding", self._encoding), ("errors", self._errors))
            if value is not None
        ]
        return f"{type(self).__name__}({', '.join(positional + keyword)})"
+
+
def cli_detect(argv: list[str] | None = None) -> int:
    """
    CLI assistant using ARGV and ArgumentParser
    :param argv: argument vector; None lets argparse read sys.argv[1:]
    :return: 0 if everything is fine, anything else equal trouble
    """
    # --- Command-line surface definition ---
    parser = argparse.ArgumentParser(
        description="The Real First Universal Charset Detector. "
        "Discover originating encoding used on text file. "
        "Normalize text to unicode."
    )

    # Files are opened in binary mode immediately by argparse (FileType("rb")),
    # hence the explicit close() calls on every early-exit path below.
    parser.add_argument(
        "files", type=FileType("rb"), nargs="+", help="File(s) to be analysed"
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        dest="verbose",
        help="Display complementary information about file if any. "
        "Stdout will contain logs about the detection process.",
    )
    parser.add_argument(
        "-a",
        "--with-alternative",
        action="store_true",
        default=False,
        dest="alternatives",
        help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
    )
    parser.add_argument(
        "-n",
        "--normalize",
        action="store_true",
        default=False,
        dest="normalize",
        help="Permit to normalize input file. If not set, program does not write anything.",
    )
    parser.add_argument(
        "-m",
        "--minimal",
        action="store_true",
        default=False,
        dest="minimal",
        help="Only output the charset detected to STDOUT. Disabling JSON output.",
    )
    parser.add_argument(
        "-r",
        "--replace",
        action="store_true",
        default=False,
        dest="replace",
        help="Replace file when trying to normalize it instead of creating a new one.",
    )
    parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        dest="force",
        help="Replace file without asking if you are sure, use this flag with caution.",
    )
    parser.add_argument(
        "-i",
        "--no-preemptive",
        action="store_true",
        default=False,
        dest="no_preemptive",
        help="Disable looking at a charset declaration to hint the detector.",
    )
    parser.add_argument(
        "-t",
        "--threshold",
        action="store",
        default=0.2,
        type=float,
        dest="threshold",
        help="Define a custom maximum amount of noise allowed in decoded content. 0. <= noise <= 1.",
    )
    parser.add_argument(
        "--version",
        action="version",
        version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format(
            __version__,
            python_version(),
            unidata_version,
            # A compiled (non-.py) md module means the speedup extension is active.
            "OFF" if md_module.__file__.lower().endswith(".py") else "ON",
        ),
        help="Show version information and exit.",
    )

    args = parser.parse_args(argv)

    # --- Flag-combination sanity checks; close opened files before bailing out ---
    if args.replace is True and args.normalize is False:
        if args.files:
            for my_file in args.files:
                my_file.close()
        print("Use --replace in addition of --normalize only.", file=sys.stderr)
        return 1

    if args.force is True and args.replace is False:
        if args.files:
            for my_file in args.files:
                my_file.close()
        print("Use --force in addition of --replace only.", file=sys.stderr)
        return 1

    if args.threshold < 0.0 or args.threshold > 1.0:
        if args.files:
            for my_file in args.files:
                my_file.close()
        print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
        return 1

    # Accumulates one CliDetectionResult per file (plus alternatives when asked).
    x_ = []

    for my_file in args.files:
        matches = from_fp(
            my_file,
            threshold=args.threshold,
            explain=args.verbose,
            preemptive_behaviour=args.no_preemptive is False,
        )

        best_guess = matches.best()

        if best_guess is None:
            # Nothing matched: emit a placeholder "Unknown" record with full chaos.
            print(
                'Unable to identify originating encoding for "{}". {}'.format(
                    my_file.name,
                    (
                        "Maybe try increasing maximum amount of chaos."
                        if args.threshold < 1.0
                        else ""
                    ),
                ),
                file=sys.stderr,
            )
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    None,
                    [],
                    [],
                    "Unknown",
                    [],
                    False,
                    1.0,
                    0.0,
                    None,
                    True,
                )
            )
        else:
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    best_guess.encoding,
                    best_guess.encoding_aliases,
                    [
                        cp
                        for cp in best_guess.could_be_from_charset
                        if cp != best_guess.encoding
                    ],
                    best_guess.language,
                    best_guess.alphabets,
                    best_guess.bom,
                    best_guess.percent_chaos,
                    best_guess.percent_coherence,
                    None,
                    True,
                )
            )

            # Secondary candidates are appended as non-preferred records.
            if len(matches) > 1 and args.alternatives:
                for el in matches:
                    if el != best_guess:
                        x_.append(
                            CliDetectionResult(
                                abspath(my_file.name),
                                el.encoding,
                                el.encoding_aliases,
                                [
                                    cp
                                    for cp in el.could_be_from_charset
                                    if cp != el.encoding
                                ],
                                el.language,
                                el.alphabets,
                                el.bom,
                                el.percent_chaos,
                                el.percent_coherence,
                                None,
                                False,
                            )
                        )

            if args.normalize is True:
                # Already-Unicode content needs no rewrite.
                if best_guess.encoding.startswith("utf") is True:
                    print(
                        '"{}" file does not need to be normalized, as it already came from unicode.'.format(
                            my_file.name
                        ),
                        file=sys.stderr,
                    )
                    if my_file.closed is False:
                        my_file.close()
                    continue

                dir_path = dirname(realpath(my_file.name))
                file_name = basename(realpath(my_file.name))

                o_: list[str] = file_name.split(".")

                if args.replace is False:
                    # New file: inject the detected encoding before the extension.
                    o_.insert(-1, best_guess.encoding)
                    if my_file.closed is False:
                        my_file.close()
                elif (
                    args.force is False
                    and query_yes_no(
                        'Are you sure to normalize "{}" by replacing it ?'.format(
                            my_file.name
                        ),
                        "no",
                    )
                    is False
                ):
                    if my_file.closed is False:
                        my_file.close()
                    continue

                try:
                    # NOTE(review): always records the path on x_[0] even when
                    # several files are processed — looks like an upstream quirk;
                    # confirm against later charset-normalizer releases.
                    x_[0].unicode_path = join(dir_path, ".".join(o_))

                    with open(x_[0].unicode_path, "wb") as fp:
                        fp.write(best_guess.output())
                except OSError as e:
                    print(str(e), file=sys.stderr)
                    if my_file.closed is False:
                        my_file.close()
                    return 2

        if my_file.closed is False:
            my_file.close()

    # --- Reporting: full JSON by default, bare encoding names with --minimal ---
    if args.minimal is False:
        print(
            dumps(
                [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
                ensure_ascii=True,
                indent=4,
            )
        )
    else:
        for my_file in args.files:
            print(
                ", ".join(
                    [
                        el.encoding or "undefined"
                        for el in x_
                        if el.path == abspath(my_file.name)
                    ]
                )
            )

    return 0
+
+
# Allow direct execution (`python -m charset_normalizer.cli`).
if __name__ == "__main__":
    cli_detect()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/constant.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/constant.py"
new file mode 100644
index 0000000..cc71a01
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/constant.py"
@@ -0,0 +1,2015 @@
+from __future__ import annotations
+
+from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE
+from encodings.aliases import aliases
+from re import IGNORECASE
+from re import compile as re_compile
+
# Contain for each eligible encoding a list of/item bytes SIG/BOM
ENCODING_MARKS: dict[str, bytes | list[bytes]] = {
    "utf_8": BOM_UTF8,
    # UTF-7 declares several possible signatures depending on the byte that follows.
    "utf_7": [
        b"\x2b\x2f\x76\x38",
        b"\x2b\x2f\x76\x39",
        b"\x2b\x2f\x76\x2b",
        b"\x2b\x2f\x76\x2f",
        b"\x2b\x2f\x76\x38\x2d",
    ],
    "gb18030": b"\x84\x31\x95\x33",
    "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE],
    "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE],
}

# Layers/sequences at or below this many characters are skipped by the
# coherence analysis (see cd.coherence_ratio) — too short to be meaningful.
TOO_SMALL_SEQUENCE: int = 32
# NOTE(review): presumably the payload size (10 MB) above which the detector
# stops analysing exhaustively — confirm usage in api.py (not visible here).
TOO_BIG_SEQUENCE: int = int(10e6)

# Count of Unicode scalar values representable in UTF-8 (0x110000 minus the 2048 surrogates).
UTF8_MAXIMAL_ALLOCATION: int = 1_112_064
+
+# Up-to-date Unicode ucd/15.0.0
+UNICODE_RANGES_COMBINED: dict[str, range] = {
+ "Control character": range(32),
+ "Basic Latin": range(32, 128),
+ "Latin-1 Supplement": range(128, 256),
+ "Latin Extended-A": range(256, 384),
+ "Latin Extended-B": range(384, 592),
+ "IPA Extensions": range(592, 688),
+ "Spacing Modifier Letters": range(688, 768),
+ "Combining Diacritical Marks": range(768, 880),
+ "Greek and Coptic": range(880, 1024),
+ "Cyrillic": range(1024, 1280),
+ "Cyrillic Supplement": range(1280, 1328),
+ "Armenian": range(1328, 1424),
+ "Hebrew": range(1424, 1536),
+ "Arabic": range(1536, 1792),
+ "Syriac": range(1792, 1872),
+ "Arabic Supplement": range(1872, 1920),
+ "Thaana": range(1920, 1984),
+ "NKo": range(1984, 2048),
+ "Samaritan": range(2048, 2112),
+ "Mandaic": range(2112, 2144),
+ "Syriac Supplement": range(2144, 2160),
+ "Arabic Extended-B": range(2160, 2208),
+ "Arabic Extended-A": range(2208, 2304),
+ "Devanagari": range(2304, 2432),
+ "Bengali": range(2432, 2560),
+ "Gurmukhi": range(2560, 2688),
+ "Gujarati": range(2688, 2816),
+ "Oriya": range(2816, 2944),
+ "Tamil": range(2944, 3072),
+ "Telugu": range(3072, 3200),
+ "Kannada": range(3200, 3328),
+ "Malayalam": range(3328, 3456),
+ "Sinhala": range(3456, 3584),
+ "Thai": range(3584, 3712),
+ "Lao": range(3712, 3840),
+ "Tibetan": range(3840, 4096),
+ "Myanmar": range(4096, 4256),
+ "Georgian": range(4256, 4352),
+ "Hangul Jamo": range(4352, 4608),
+ "Ethiopic": range(4608, 4992),
+ "Ethiopic Supplement": range(4992, 5024),
+ "Cherokee": range(5024, 5120),
+ "Unified Canadian Aboriginal Syllabics": range(5120, 5760),
+ "Ogham": range(5760, 5792),
+ "Runic": range(5792, 5888),
+ "Tagalog": range(5888, 5920),
+ "Hanunoo": range(5920, 5952),
+ "Buhid": range(5952, 5984),
+ "Tagbanwa": range(5984, 6016),
+ "Khmer": range(6016, 6144),
+ "Mongolian": range(6144, 6320),
+ "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400),
+ "Limbu": range(6400, 6480),
+ "Tai Le": range(6480, 6528),
+ "New Tai Lue": range(6528, 6624),
+ "Khmer Symbols": range(6624, 6656),
+ "Buginese": range(6656, 6688),
+ "Tai Tham": range(6688, 6832),
+ "Combining Diacritical Marks Extended": range(6832, 6912),
+ "Balinese": range(6912, 7040),
+ "Sundanese": range(7040, 7104),
+ "Batak": range(7104, 7168),
+ "Lepcha": range(7168, 7248),
+ "Ol Chiki": range(7248, 7296),
+ "Cyrillic Extended-C": range(7296, 7312),
+ "Georgian Extended": range(7312, 7360),
+ "Sundanese Supplement": range(7360, 7376),
+ "Vedic Extensions": range(7376, 7424),
+ "Phonetic Extensions": range(7424, 7552),
+ "Phonetic Extensions Supplement": range(7552, 7616),
+ "Combining Diacritical Marks Supplement": range(7616, 7680),
+ "Latin Extended Additional": range(7680, 7936),
+ "Greek Extended": range(7936, 8192),
+ "General Punctuation": range(8192, 8304),
+ "Superscripts and Subscripts": range(8304, 8352),
+ "Currency Symbols": range(8352, 8400),
+ "Combining Diacritical Marks for Symbols": range(8400, 8448),
+ "Letterlike Symbols": range(8448, 8528),
+ "Number Forms": range(8528, 8592),
+ "Arrows": range(8592, 8704),
+ "Mathematical Operators": range(8704, 8960),
+ "Miscellaneous Technical": range(8960, 9216),
+ "Control Pictures": range(9216, 9280),
+ "Optical Character Recognition": range(9280, 9312),
+ "Enclosed Alphanumerics": range(9312, 9472),
+ "Box Drawing": range(9472, 9600),
+ "Block Elements": range(9600, 9632),
+ "Geometric Shapes": range(9632, 9728),
+ "Miscellaneous Symbols": range(9728, 9984),
+ "Dingbats": range(9984, 10176),
+ "Miscellaneous Mathematical Symbols-A": range(10176, 10224),
+ "Supplemental Arrows-A": range(10224, 10240),
+ "Braille Patterns": range(10240, 10496),
+ "Supplemental Arrows-B": range(10496, 10624),
+ "Miscellaneous Mathematical Symbols-B": range(10624, 10752),
+ "Supplemental Mathematical Operators": range(10752, 11008),
+ "Miscellaneous Symbols and Arrows": range(11008, 11264),
+ "Glagolitic": range(11264, 11360),
+ "Latin Extended-C": range(11360, 11392),
+ "Coptic": range(11392, 11520),
+ "Georgian Supplement": range(11520, 11568),
+ "Tifinagh": range(11568, 11648),
+ "Ethiopic Extended": range(11648, 11744),
+ "Cyrillic Extended-A": range(11744, 11776),
+ "Supplemental Punctuation": range(11776, 11904),
+ "CJK Radicals Supplement": range(11904, 12032),
+ "Kangxi Radicals": range(12032, 12256),
+ "Ideographic Description Characters": range(12272, 12288),
+ "CJK Symbols and Punctuation": range(12288, 12352),
+ "Hiragana": range(12352, 12448),
+ "Katakana": range(12448, 12544),
+ "Bopomofo": range(12544, 12592),
+ "Hangul Compatibility Jamo": range(12592, 12688),
+ "Kanbun": range(12688, 12704),
+ "Bopomofo Extended": range(12704, 12736),
+ "CJK Strokes": range(12736, 12784),
+ "Katakana Phonetic Extensions": range(12784, 12800),
+ "Enclosed CJK Letters and Months": range(12800, 13056),
+ "CJK Compatibility": range(13056, 13312),
+ "CJK Unified Ideographs Extension A": range(13312, 19904),
+ "Yijing Hexagram Symbols": range(19904, 19968),
+ "CJK Unified Ideographs": range(19968, 40960),
+ "Yi Syllables": range(40960, 42128),
+ "Yi Radicals": range(42128, 42192),
+ "Lisu": range(42192, 42240),
+ "Vai": range(42240, 42560),
+ "Cyrillic Extended-B": range(42560, 42656),
+ "Bamum": range(42656, 42752),
+ "Modifier Tone Letters": range(42752, 42784),
+ "Latin Extended-D": range(42784, 43008),
+ "Syloti Nagri": range(43008, 43056),
+ "Common Indic Number Forms": range(43056, 43072),
+ "Phags-pa": range(43072, 43136),
+ "Saurashtra": range(43136, 43232),
+ "Devanagari Extended": range(43232, 43264),
+ "Kayah Li": range(43264, 43312),
+ "Rejang": range(43312, 43360),
+ "Hangul Jamo Extended-A": range(43360, 43392),
+ "Javanese": range(43392, 43488),
+ "Myanmar Extended-B": range(43488, 43520),
+ "Cham": range(43520, 43616),
+ "Myanmar Extended-A": range(43616, 43648),
+ "Tai Viet": range(43648, 43744),
+ "Meetei Mayek Extensions": range(43744, 43776),
+ "Ethiopic Extended-A": range(43776, 43824),
+ "Latin Extended-E": range(43824, 43888),
+ "Cherokee Supplement": range(43888, 43968),
+ "Meetei Mayek": range(43968, 44032),
+ "Hangul Syllables": range(44032, 55216),
+ "Hangul Jamo Extended-B": range(55216, 55296),
+ "High Surrogates": range(55296, 56192),
+ "High Private Use Surrogates": range(56192, 56320),
+ "Low Surrogates": range(56320, 57344),
+ "Private Use Area": range(57344, 63744),
+ "CJK Compatibility Ideographs": range(63744, 64256),
+ "Alphabetic Presentation Forms": range(64256, 64336),
+ "Arabic Presentation Forms-A": range(64336, 65024),
+ "Variation Selectors": range(65024, 65040),
+ "Vertical Forms": range(65040, 65056),
+ "Combining Half Marks": range(65056, 65072),
+ "CJK Compatibility Forms": range(65072, 65104),
+ "Small Form Variants": range(65104, 65136),
+ "Arabic Presentation Forms-B": range(65136, 65280),
+ "Halfwidth and Fullwidth Forms": range(65280, 65520),
+ "Specials": range(65520, 65536),
+ "Linear B Syllabary": range(65536, 65664),
+ "Linear B Ideograms": range(65664, 65792),
+ "Aegean Numbers": range(65792, 65856),
+ "Ancient Greek Numbers": range(65856, 65936),
+ "Ancient Symbols": range(65936, 66000),
+ "Phaistos Disc": range(66000, 66048),
+ "Lycian": range(66176, 66208),
+ "Carian": range(66208, 66272),
+ "Coptic Epact Numbers": range(66272, 66304),
+ "Old Italic": range(66304, 66352),
+ "Gothic": range(66352, 66384),
+ "Old Permic": range(66384, 66432),
+ "Ugaritic": range(66432, 66464),
+ "Old Persian": range(66464, 66528),
+ "Deseret": range(66560, 66640),
+ "Shavian": range(66640, 66688),
+ "Osmanya": range(66688, 66736),
+ "Osage": range(66736, 66816),
+ "Elbasan": range(66816, 66864),
+ "Caucasian Albanian": range(66864, 66928),
+ "Vithkuqi": range(66928, 67008),
+ "Linear A": range(67072, 67456),
+ "Latin Extended-F": range(67456, 67520),
+ "Cypriot Syllabary": range(67584, 67648),
+ "Imperial Aramaic": range(67648, 67680),
+ "Palmyrene": range(67680, 67712),
+ "Nabataean": range(67712, 67760),
+ "Hatran": range(67808, 67840),
+ "Phoenician": range(67840, 67872),
+ "Lydian": range(67872, 67904),
+ "Meroitic Hieroglyphs": range(67968, 68000),
+ "Meroitic Cursive": range(68000, 68096),
+ "Kharoshthi": range(68096, 68192),
+ "Old South Arabian": range(68192, 68224),
+ "Old North Arabian": range(68224, 68256),
+ "Manichaean": range(68288, 68352),
+ "Avestan": range(68352, 68416),
+ "Inscriptional Parthian": range(68416, 68448),
+ "Inscriptional Pahlavi": range(68448, 68480),
+ "Psalter Pahlavi": range(68480, 68528),
+ "Old Turkic": range(68608, 68688),
+ "Old Hungarian": range(68736, 68864),
+ "Hanifi Rohingya": range(68864, 68928),
+ "Rumi Numeral Symbols": range(69216, 69248),
+ "Yezidi": range(69248, 69312),
+ "Arabic Extended-C": range(69312, 69376),
+ "Old Sogdian": range(69376, 69424),
+ "Sogdian": range(69424, 69488),
+ "Old Uyghur": range(69488, 69552),
+ "Chorasmian": range(69552, 69600),
+ "Elymaic": range(69600, 69632),
+ "Brahmi": range(69632, 69760),
+ "Kaithi": range(69760, 69840),
+ "Sora Sompeng": range(69840, 69888),
+ "Chakma": range(69888, 69968),
+ "Mahajani": range(69968, 70016),
+ "Sharada": range(70016, 70112),
+ "Sinhala Archaic Numbers": range(70112, 70144),
+ "Khojki": range(70144, 70224),
+ "Multani": range(70272, 70320),
+ "Khudawadi": range(70320, 70400),
+ "Grantha": range(70400, 70528),
+ "Newa": range(70656, 70784),
+ "Tirhuta": range(70784, 70880),
+ "Siddham": range(71040, 71168),
+ "Modi": range(71168, 71264),
+ "Mongolian Supplement": range(71264, 71296),
+ "Takri": range(71296, 71376),
+ "Ahom": range(71424, 71504),
+ "Dogra": range(71680, 71760),
+ "Warang Citi": range(71840, 71936),
+ "Dives Akuru": range(71936, 72032),
+ "Nandinagari": range(72096, 72192),
+ "Zanabazar Square": range(72192, 72272),
+ "Soyombo": range(72272, 72368),
+ "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384),
+ "Pau Cin Hau": range(72384, 72448),
+ "Devanagari Extended-A": range(72448, 72544),
+ "Bhaiksuki": range(72704, 72816),
+ "Marchen": range(72816, 72896),
+ "Masaram Gondi": range(72960, 73056),
+ "Gunjala Gondi": range(73056, 73136),
+ "Makasar": range(73440, 73472),
+ "Kawi": range(73472, 73568),
+ "Lisu Supplement": range(73648, 73664),
+ "Tamil Supplement": range(73664, 73728),
+ "Cuneiform": range(73728, 74752),
+ "Cuneiform Numbers and Punctuation": range(74752, 74880),
+ "Early Dynastic Cuneiform": range(74880, 75088),
+ "Cypro-Minoan": range(77712, 77824),
+ "Egyptian Hieroglyphs": range(77824, 78896),
+ "Egyptian Hieroglyph Format Controls": range(78896, 78944),
+ "Anatolian Hieroglyphs": range(82944, 83584),
+ "Bamum Supplement": range(92160, 92736),
+ "Mro": range(92736, 92784),
+ "Tangsa": range(92784, 92880),
+ "Bassa Vah": range(92880, 92928),
+ "Pahawh Hmong": range(92928, 93072),
+ "Medefaidrin": range(93760, 93856),
+ "Miao": range(93952, 94112),
+ "Ideographic Symbols and Punctuation": range(94176, 94208),
+ "Tangut": range(94208, 100352),
+ "Tangut Components": range(100352, 101120),
+ "Khitan Small Script": range(101120, 101632),
+ "Tangut Supplement": range(101632, 101760),
+ "Kana Extended-B": range(110576, 110592),
+ "Kana Supplement": range(110592, 110848),
+ "Kana Extended-A": range(110848, 110896),
+ "Small Kana Extension": range(110896, 110960),
+ "Nushu": range(110960, 111360),
+ "Duployan": range(113664, 113824),
+ "Shorthand Format Controls": range(113824, 113840),
+ "Znamenny Musical Notation": range(118528, 118736),
+ "Byzantine Musical Symbols": range(118784, 119040),
+ "Musical Symbols": range(119040, 119296),
+ "Ancient Greek Musical Notation": range(119296, 119376),
+ "Kaktovik Numerals": range(119488, 119520),
+ "Mayan Numerals": range(119520, 119552),
+ "Tai Xuan Jing Symbols": range(119552, 119648),
+ "Counting Rod Numerals": range(119648, 119680),
+ "Mathematical Alphanumeric Symbols": range(119808, 120832),
+ "Sutton SignWriting": range(120832, 121520),
+ "Latin Extended-G": range(122624, 122880),
+ "Glagolitic Supplement": range(122880, 122928),
+ "Cyrillic Extended-D": range(122928, 123024),
+ "Nyiakeng Puachue Hmong": range(123136, 123216),
+ "Toto": range(123536, 123584),
+ "Wancho": range(123584, 123648),
+ "Nag Mundari": range(124112, 124160),
+ "Ethiopic Extended-B": range(124896, 124928),
+ "Mende Kikakui": range(124928, 125152),
+ "Adlam": range(125184, 125280),
+ "Indic Siyaq Numbers": range(126064, 126144),
+ "Ottoman Siyaq Numbers": range(126208, 126288),
+ "Arabic Mathematical Alphabetic Symbols": range(126464, 126720),
+ "Mahjong Tiles": range(126976, 127024),
+ "Domino Tiles": range(127024, 127136),
+ "Playing Cards": range(127136, 127232),
+ "Enclosed Alphanumeric Supplement": range(127232, 127488),
+ "Enclosed Ideographic Supplement": range(127488, 127744),
+ "Miscellaneous Symbols and Pictographs": range(127744, 128512),
+ "Emoticons range(Emoji)": range(128512, 128592),
+ "Ornamental Dingbats": range(128592, 128640),
+ "Transport and Map Symbols": range(128640, 128768),
+ "Alchemical Symbols": range(128768, 128896),
+ "Geometric Shapes Extended": range(128896, 129024),
+ "Supplemental Arrows-C": range(129024, 129280),
+ "Supplemental Symbols and Pictographs": range(129280, 129536),
+ "Chess Symbols": range(129536, 129648),
+ "Symbols and Pictographs Extended-A": range(129648, 129792),
+ "Symbols for Legacy Computing": range(129792, 130048),
+ "CJK Unified Ideographs Extension B": range(131072, 173792),
+ "CJK Unified Ideographs Extension C": range(173824, 177984),
+ "CJK Unified Ideographs Extension D": range(177984, 178208),
+ "CJK Unified Ideographs Extension E": range(178208, 183984),
+ "CJK Unified Ideographs Extension F": range(183984, 191472),
+ "CJK Compatibility Ideographs Supplement": range(194560, 195104),
+ "CJK Unified Ideographs Extension G": range(196608, 201552),
+ "CJK Unified Ideographs Extension H": range(201552, 205744),
+ "Tags": range(917504, 917632),
+ "Variation Selectors Supplement": range(917760, 918000),
+ "Supplementary Private Use Area-A": range(983040, 1048576),
+ "Supplementary Private Use Area-B": range(1048576, 1114112),
+}
+
+
# Substrings of Unicode block names that denote supplementary/extended
# ranges (e.g. "Latin Extended-A") rather than a script's primary block.
UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [
    "Supplement",
    "Extended",
    "Extensions",
    "Modifier",
    "Marks",
    "Punctuation",
    "Symbols",
    "Forms",
    "Operators",
    "Miscellaneous",
    "Drawing",
    "Block",
    "Shapes",
    "Supplemental",
    "Tags",
]
+
# Matches an in-content encoding declaration such as `charset=utf-8`,
# `encoding: latin-1` or a coding cookie; group 1 captures the encoding
# token. Case-insensitive.
RE_POSSIBLE_ENCODING_INDICATION = re_compile(
    r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)",
    IGNORECASE,
)
+
# Codecs that the Python codec machinery exposes only under these exact
# names (no entry as an alias target in encodings.aliases), so they must
# be merged into the alias-derived list by hand.
IANA_NO_ALIASES: list[str] = [
    "cp720",
    "cp737",
    "cp856",
    "cp874",
    "cp875",
    "cp1006",
    "koi8_r",
    "koi8_t",
    "koi8_u",
]

# Every codec name that may be probed: the de-duplicated codec targets from
# encodings.aliases plus the alias-less names above, minus transform codecs
# ("*_codec"), rot_13 and platform pseudo-codecs.
# NOTE: the set union also prevents duplicates when a name from
# IANA_NO_ALIASES (e.g. "koi8_r") is additionally an alias target.
IANA_SUPPORTED: list[str] = sorted(
    name
    for name in set(aliases.values()) | set(IANA_NO_ALIASES)
    if not name.endswith("_codec") and name not in {"rot_13", "tactis", "mbcs"}
)

IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED)
+
# Pre-computed groups of code pages whose byte layouts are similar,
# as measured by the cp_similarity function; each key maps to the list
# of its close alternatives.
IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = {
    "cp037": ["cp1026", "cp1140", "cp273", "cp500"],
    "cp1026": ["cp037", "cp1140", "cp273", "cp500"],
    "cp1125": ["cp866"],
    "cp1140": ["cp037", "cp1026", "cp273", "cp500"],
    "cp1250": ["iso8859_2"],
    "cp1251": ["kz1048", "ptcp154"],
    "cp1252": ["iso8859_15", "iso8859_9", "latin_1"],
    "cp1253": ["iso8859_7"],
    "cp1254": ["iso8859_15", "iso8859_9", "latin_1"],
    "cp1257": ["iso8859_13"],
    "cp273": ["cp037", "cp1026", "cp1140", "cp500"],
    "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"],
    "cp500": ["cp037", "cp1026", "cp1140", "cp273"],
    "cp850": ["cp437", "cp857", "cp858", "cp865"],
    "cp857": ["cp850", "cp858", "cp865"],
    "cp858": ["cp437", "cp850", "cp857", "cp865"],
    "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"],
    "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"],
    "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"],
    "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"],
    "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"],
    "cp866": ["cp1125"],
    "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"],
    "iso8859_11": ["tis_620"],
    "iso8859_13": ["cp1257"],
    "iso8859_14": [
        "iso8859_10",
        "iso8859_15",
        "iso8859_16",
        "iso8859_3",
        "iso8859_9",
        "latin_1",
    ],
    "iso8859_15": [
        "cp1252",
        "cp1254",
        "iso8859_10",
        "iso8859_14",
        "iso8859_16",
        "iso8859_3",
        "iso8859_9",
        "latin_1",
    ],
    "iso8859_16": [
        "iso8859_14",
        "iso8859_15",
        "iso8859_2",
        "iso8859_3",
        "iso8859_9",
        "latin_1",
    ],
    "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"],
    "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"],
    "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"],
    "iso8859_7": ["cp1253"],
    "iso8859_9": [
        "cp1252",
        "cp1254",
        "cp1258",
        "iso8859_10",
        "iso8859_14",
        "iso8859_15",
        "iso8859_16",
        "iso8859_3",
        "iso8859_4",
        "latin_1",
    ],
    "kz1048": ["cp1251", "ptcp154"],
    "latin_1": [
        "cp1252",
        "cp1254",
        "cp1258",
        "iso8859_10",
        "iso8859_14",
        "iso8859_15",
        "iso8859_16",
        "iso8859_3",
        "iso8859_4",
        "iso8859_9",
    ],
    "mac_iceland": ["mac_roman", "mac_turkish"],
    "mac_roman": ["mac_iceland", "mac_turkish"],
    "mac_turkish": ["mac_iceland", "mac_roman"],
    "ptcp154": ["cp1251", "kz1048"],
    "tis_620": ["iso8859_11"],
}
+
+
# Mapping from Python codec names to the encoding names chardet
# historically returns; used by the legacy detect() wrapper so its
# output stays backward-compatible with chardet.
CHARDET_CORRESPONDENCE: dict[str, str] = {
    "iso2022_kr": "ISO-2022-KR",
    "iso2022_jp": "ISO-2022-JP",
    "euc_kr": "EUC-KR",
    "tis_620": "TIS-620",
    "utf_32": "UTF-32",
    "euc_jp": "EUC-JP",
    "koi8_r": "KOI8-R",
    "iso8859_1": "ISO-8859-1",
    "iso8859_2": "ISO-8859-2",
    "iso8859_5": "ISO-8859-5",
    "iso8859_6": "ISO-8859-6",
    "iso8859_7": "ISO-8859-7",
    "iso8859_8": "ISO-8859-8",
    "utf_16": "UTF-16",
    "cp855": "IBM855",
    "mac_cyrillic": "MacCyrillic",
    "gb2312": "GB2312",
    "gb18030": "GB18030",
    "cp932": "CP932",
    "cp866": "IBM866",
    "utf_8": "utf-8",
    "utf_8_sig": "UTF-8-SIG",
    "shift_jis": "SHIFT_JIS",
    "big5": "Big5",
    "cp1250": "windows-1250",
    "cp1251": "windows-1251",
    "cp1252": "Windows-1252",
    "cp1253": "windows-1253",
    "cp1255": "windows-1255",
    "cp1256": "windows-1256",
    "cp1254": "Windows-1254",
    "cp949": "CP949",
}
+
+
# ASCII characters frequently found in markup/structured payloads; their
# presence is not treated as a sign of a decoding mess.
COMMON_SAFE_ASCII_CHARACTERS: set[str] = set('<>=:/&;{}[],|"-()')
+
# Sample character sets — replace with full lists if needed
COMMON_CHINESE_CHARACTERS = "的一是在不了有和人这中大为上个国我以要他时来用们生到作地于出就分对成会可主发年动同工也能下过子说产种面而方后多定行学法所民得经十三之进着等部度家电力里如水化高自二理起小物现实加量都两体制机当使点从业本去把性好应开它合还因由其些然前外天政四日那社义事平形相全表间样与关各重新线内数正心反你明看原又么利比或但质气第向道命此变条只没结解问意建月公无系军很情者最立代想已通并提直题党程展五果料象员革位入常文总次品式活设及管特件长求老头基资边流路级少图山统接知较将组见计别她手角期根论运农指几九区强放决西被干做必战先回则任取据处队南给色光门即保治北造百规热领七海口东导器压志世金增争济阶油思术极交受联什认六共权收证改清己美再采转更单风切打白教速花带安场身车例真务具万每目至达走积示议声报斗完类八离华名确才科张信马节话米整空元况今集温传土许步群广石记需段研界拉林律叫且究观越织装影算低持音众书布复容儿须际商非验连断深难近矿千周委素技备半办青省列习响约支般史感劳便团往酸历市克何除消构府太准精值号率族维划选标写存候毛亲快效斯院查江型眼王按格养易置派层片始却专状育厂京识适属圆包火住调满县局照参红细引听该铁价严龙飞"

COMMON_JAPANESE_CHARACTERS = "日一国年大十二本中長出三時行見月分後前生五間上東四今金九入学高円子外八六下来気小七山話女北午百書先名川千水半男西電校語土木聞食車何南万毎白天母火右読友左休父雨"

COMMON_KOREAN_CHARACTERS = "一二三四五六七八九十百千萬上下左右中人女子大小山川日月火水木金土父母天地國名年時文校學生"

# Union of the three samples above, used for cheap CJK membership tests.
COMMON_CJK_CHARACTERS = (
    set(COMMON_CHINESE_CHARACTERS)
    | set(COMMON_JAPANESE_CHARACTERS)
    | set(COMMON_KOREAN_CHARACTERS)
)
+
# Codec names associated with Korean text.
KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"}
# Codec names associated with Chinese text.
ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"}

# Logging level below DEBUG (DEBUG is 10 in the stdlib logging module).
TRACE: int = 5
+
+
# Language labels that contain the em dash "—"
# character are to be considered alternative sequences of the origin label.
+FREQUENCIES: dict[str, list[str]] = {
+ "English": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "u",
+ "m",
+ "f",
+ "p",
+ "g",
+ "w",
+ "y",
+ "b",
+ "v",
+ "k",
+ "x",
+ "j",
+ "z",
+ "q",
+ ],
+ "English—": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "m",
+ "u",
+ "f",
+ "p",
+ "g",
+ "w",
+ "b",
+ "y",
+ "v",
+ "k",
+ "j",
+ "x",
+ "z",
+ "q",
+ ],
+ "German": [
+ "e",
+ "n",
+ "i",
+ "r",
+ "s",
+ "t",
+ "a",
+ "d",
+ "h",
+ "u",
+ "l",
+ "g",
+ "o",
+ "c",
+ "m",
+ "b",
+ "f",
+ "k",
+ "w",
+ "z",
+ "p",
+ "v",
+ "ü",
+ "ä",
+ "ö",
+ "j",
+ ],
+ "French": [
+ "e",
+ "a",
+ "s",
+ "n",
+ "i",
+ "t",
+ "r",
+ "l",
+ "u",
+ "o",
+ "d",
+ "c",
+ "p",
+ "m",
+ "é",
+ "v",
+ "g",
+ "f",
+ "b",
+ "h",
+ "q",
+ "à",
+ "x",
+ "è",
+ "y",
+ "j",
+ ],
+ "Dutch": [
+ "e",
+ "n",
+ "a",
+ "i",
+ "r",
+ "t",
+ "o",
+ "d",
+ "s",
+ "l",
+ "g",
+ "h",
+ "v",
+ "m",
+ "u",
+ "k",
+ "c",
+ "p",
+ "b",
+ "w",
+ "j",
+ "z",
+ "f",
+ "y",
+ "x",
+ "ë",
+ ],
+ "Italian": [
+ "e",
+ "i",
+ "a",
+ "o",
+ "n",
+ "l",
+ "t",
+ "r",
+ "s",
+ "c",
+ "d",
+ "u",
+ "p",
+ "m",
+ "g",
+ "v",
+ "f",
+ "b",
+ "z",
+ "h",
+ "q",
+ "è",
+ "à",
+ "k",
+ "y",
+ "ò",
+ ],
+ "Polish": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "z",
+ "w",
+ "s",
+ "c",
+ "t",
+ "k",
+ "y",
+ "d",
+ "p",
+ "m",
+ "u",
+ "l",
+ "j",
+ "ł",
+ "g",
+ "b",
+ "h",
+ "ą",
+ "ę",
+ "ó",
+ ],
+ "Spanish": [
+ "e",
+ "a",
+ "o",
+ "n",
+ "s",
+ "r",
+ "i",
+ "l",
+ "d",
+ "t",
+ "c",
+ "u",
+ "m",
+ "p",
+ "b",
+ "g",
+ "v",
+ "f",
+ "y",
+ "ó",
+ "h",
+ "q",
+ "í",
+ "j",
+ "z",
+ "á",
+ ],
+ "Russian": [
+ "о",
+ "а",
+ "е",
+ "и",
+ "н",
+ "с",
+ "т",
+ "р",
+ "в",
+ "л",
+ "к",
+ "м",
+ "д",
+ "п",
+ "у",
+ "г",
+ "я",
+ "ы",
+ "з",
+ "б",
+ "й",
+ "ь",
+ "ч",
+ "х",
+ "ж",
+ "ц",
+ ],
+ # Jap-Kanji
+ "Japanese": [
+ "人",
+ "一",
+ "大",
+ "亅",
+ "丁",
+ "丨",
+ "竹",
+ "笑",
+ "口",
+ "日",
+ "今",
+ "二",
+ "彳",
+ "行",
+ "十",
+ "土",
+ "丶",
+ "寸",
+ "寺",
+ "時",
+ "乙",
+ "丿",
+ "乂",
+ "气",
+ "気",
+ "冂",
+ "巾",
+ "亠",
+ "市",
+ "目",
+ "儿",
+ "見",
+ "八",
+ "小",
+ "凵",
+ "県",
+ "月",
+ "彐",
+ "門",
+ "間",
+ "木",
+ "東",
+ "山",
+ "出",
+ "本",
+ "中",
+ "刀",
+ "分",
+ "耳",
+ "又",
+ "取",
+ "最",
+ "言",
+ "田",
+ "心",
+ "思",
+ "刂",
+ "前",
+ "京",
+ "尹",
+ "事",
+ "生",
+ "厶",
+ "云",
+ "会",
+ "未",
+ "来",
+ "白",
+ "冫",
+ "楽",
+ "灬",
+ "馬",
+ "尸",
+ "尺",
+ "駅",
+ "明",
+ "耂",
+ "者",
+ "了",
+ "阝",
+ "都",
+ "高",
+ "卜",
+ "占",
+ "厂",
+ "广",
+ "店",
+ "子",
+ "申",
+ "奄",
+ "亻",
+ "俺",
+ "上",
+ "方",
+ "冖",
+ "学",
+ "衣",
+ "艮",
+ "食",
+ "自",
+ ],
+ # Jap-Katakana
+ "Japanese—": [
+ "ー",
+ "ン",
+ "ス",
+ "・",
+ "ル",
+ "ト",
+ "リ",
+ "イ",
+ "ア",
+ "ラ",
+ "ッ",
+ "ク",
+ "ド",
+ "シ",
+ "レ",
+ "ジ",
+ "タ",
+ "フ",
+ "ロ",
+ "カ",
+ "テ",
+ "マ",
+ "ィ",
+ "グ",
+ "バ",
+ "ム",
+ "プ",
+ "オ",
+ "コ",
+ "デ",
+ "ニ",
+ "ウ",
+ "メ",
+ "サ",
+ "ビ",
+ "ナ",
+ "ブ",
+ "ャ",
+ "エ",
+ "ュ",
+ "チ",
+ "キ",
+ "ズ",
+ "ダ",
+ "パ",
+ "ミ",
+ "ェ",
+ "ョ",
+ "ハ",
+ "セ",
+ "ベ",
+ "ガ",
+ "モ",
+ "ツ",
+ "ネ",
+ "ボ",
+ "ソ",
+ "ノ",
+ "ァ",
+ "ヴ",
+ "ワ",
+ "ポ",
+ "ペ",
+ "ピ",
+ "ケ",
+ "ゴ",
+ "ギ",
+ "ザ",
+ "ホ",
+ "ゲ",
+ "ォ",
+ "ヤ",
+ "ヒ",
+ "ユ",
+ "ヨ",
+ "ヘ",
+ "ゼ",
+ "ヌ",
+ "ゥ",
+ "ゾ",
+ "ヶ",
+ "ヂ",
+ "ヲ",
+ "ヅ",
+ "ヵ",
+ "ヱ",
+ "ヰ",
+ "ヮ",
+ "ヽ",
+ "゠",
+ "ヾ",
+ "ヷ",
+ "ヿ",
+ "ヸ",
+ "ヹ",
+ "ヺ",
+ ],
+ # Jap-Hiragana
+ "Japanese——": [
+ "の",
+ "に",
+ "る",
+ "た",
+ "と",
+ "は",
+ "し",
+ "い",
+ "を",
+ "で",
+ "て",
+ "が",
+ "な",
+ "れ",
+ "か",
+ "ら",
+ "さ",
+ "っ",
+ "り",
+ "す",
+ "あ",
+ "も",
+ "こ",
+ "ま",
+ "う",
+ "く",
+ "よ",
+ "き",
+ "ん",
+ "め",
+ "お",
+ "け",
+ "そ",
+ "つ",
+ "だ",
+ "や",
+ "え",
+ "ど",
+ "わ",
+ "ち",
+ "み",
+ "せ",
+ "じ",
+ "ば",
+ "へ",
+ "び",
+ "ず",
+ "ろ",
+ "ほ",
+ "げ",
+ "む",
+ "べ",
+ "ひ",
+ "ょ",
+ "ゆ",
+ "ぶ",
+ "ご",
+ "ゃ",
+ "ね",
+ "ふ",
+ "ぐ",
+ "ぎ",
+ "ぼ",
+ "ゅ",
+ "づ",
+ "ざ",
+ "ぞ",
+ "ぬ",
+ "ぜ",
+ "ぱ",
+ "ぽ",
+ "ぷ",
+ "ぴ",
+ "ぃ",
+ "ぁ",
+ "ぇ",
+ "ぺ",
+ "ゞ",
+ "ぢ",
+ "ぉ",
+ "ぅ",
+ "ゐ",
+ "ゝ",
+ "ゑ",
+ "゛",
+ "゜",
+ "ゎ",
+ "ゔ",
+ "゚",
+ "ゟ",
+ "゙",
+ "ゕ",
+ "ゖ",
+ ],
+ "Portuguese": [
+ "a",
+ "e",
+ "o",
+ "s",
+ "i",
+ "r",
+ "d",
+ "n",
+ "t",
+ "m",
+ "u",
+ "c",
+ "l",
+ "p",
+ "g",
+ "v",
+ "b",
+ "f",
+ "h",
+ "ã",
+ "q",
+ "é",
+ "ç",
+ "á",
+ "z",
+ "í",
+ ],
+ "Swedish": [
+ "e",
+ "a",
+ "n",
+ "r",
+ "t",
+ "s",
+ "i",
+ "l",
+ "d",
+ "o",
+ "m",
+ "k",
+ "g",
+ "v",
+ "h",
+ "f",
+ "u",
+ "p",
+ "ä",
+ "c",
+ "b",
+ "ö",
+ "å",
+ "y",
+ "j",
+ "x",
+ ],
+ "Chinese": [
+ "的",
+ "一",
+ "是",
+ "不",
+ "了",
+ "在",
+ "人",
+ "有",
+ "我",
+ "他",
+ "这",
+ "个",
+ "们",
+ "中",
+ "来",
+ "上",
+ "大",
+ "为",
+ "和",
+ "国",
+ "地",
+ "到",
+ "以",
+ "说",
+ "时",
+ "要",
+ "就",
+ "出",
+ "会",
+ "可",
+ "也",
+ "你",
+ "对",
+ "生",
+ "能",
+ "而",
+ "子",
+ "那",
+ "得",
+ "于",
+ "着",
+ "下",
+ "自",
+ "之",
+ "年",
+ "过",
+ "发",
+ "后",
+ "作",
+ "里",
+ "用",
+ "道",
+ "行",
+ "所",
+ "然",
+ "家",
+ "种",
+ "事",
+ "成",
+ "方",
+ "多",
+ "经",
+ "么",
+ "去",
+ "法",
+ "学",
+ "如",
+ "都",
+ "同",
+ "现",
+ "当",
+ "没",
+ "动",
+ "面",
+ "起",
+ "看",
+ "定",
+ "天",
+ "分",
+ "还",
+ "进",
+ "好",
+ "小",
+ "部",
+ "其",
+ "些",
+ "主",
+ "样",
+ "理",
+ "心",
+ "她",
+ "本",
+ "前",
+ "开",
+ "但",
+ "因",
+ "只",
+ "从",
+ "想",
+ "实",
+ ],
+ "Ukrainian": [
+ "о",
+ "а",
+ "н",
+ "і",
+ "и",
+ "р",
+ "в",
+ "т",
+ "е",
+ "с",
+ "к",
+ "л",
+ "у",
+ "д",
+ "м",
+ "п",
+ "з",
+ "я",
+ "ь",
+ "б",
+ "г",
+ "й",
+ "ч",
+ "х",
+ "ц",
+ "ї",
+ ],
+ "Norwegian": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "s",
+ "i",
+ "o",
+ "l",
+ "d",
+ "g",
+ "k",
+ "m",
+ "v",
+ "f",
+ "p",
+ "u",
+ "b",
+ "h",
+ "å",
+ "y",
+ "j",
+ "ø",
+ "c",
+ "æ",
+ "w",
+ ],
+ "Finnish": [
+ "a",
+ "i",
+ "n",
+ "t",
+ "e",
+ "s",
+ "l",
+ "o",
+ "u",
+ "k",
+ "ä",
+ "m",
+ "r",
+ "v",
+ "j",
+ "h",
+ "p",
+ "y",
+ "d",
+ "ö",
+ "g",
+ "c",
+ "b",
+ "f",
+ "w",
+ "z",
+ ],
+ "Vietnamese": [
+ "n",
+ "h",
+ "t",
+ "i",
+ "c",
+ "g",
+ "a",
+ "o",
+ "u",
+ "m",
+ "l",
+ "r",
+ "à",
+ "đ",
+ "s",
+ "e",
+ "v",
+ "p",
+ "b",
+ "y",
+ "ư",
+ "d",
+ "á",
+ "k",
+ "ộ",
+ "ế",
+ ],
+ "Czech": [
+ "o",
+ "e",
+ "a",
+ "n",
+ "t",
+ "s",
+ "i",
+ "l",
+ "v",
+ "r",
+ "k",
+ "d",
+ "u",
+ "m",
+ "p",
+ "í",
+ "c",
+ "h",
+ "z",
+ "á",
+ "y",
+ "j",
+ "b",
+ "ě",
+ "é",
+ "ř",
+ ],
+ "Hungarian": [
+ "e",
+ "a",
+ "t",
+ "l",
+ "s",
+ "n",
+ "k",
+ "r",
+ "i",
+ "o",
+ "z",
+ "á",
+ "é",
+ "g",
+ "m",
+ "b",
+ "y",
+ "v",
+ "d",
+ "h",
+ "u",
+ "p",
+ "j",
+ "ö",
+ "f",
+ "c",
+ ],
+ "Korean": [
+ "이",
+ "다",
+ "에",
+ "의",
+ "는",
+ "로",
+ "하",
+ "을",
+ "가",
+ "고",
+ "지",
+ "서",
+ "한",
+ "은",
+ "기",
+ "으",
+ "년",
+ "대",
+ "사",
+ "시",
+ "를",
+ "리",
+ "도",
+ "인",
+ "스",
+ "일",
+ ],
+ "Indonesian": [
+ "a",
+ "n",
+ "e",
+ "i",
+ "r",
+ "t",
+ "u",
+ "s",
+ "d",
+ "k",
+ "m",
+ "l",
+ "g",
+ "p",
+ "b",
+ "o",
+ "h",
+ "y",
+ "j",
+ "c",
+ "w",
+ "f",
+ "v",
+ "z",
+ "x",
+ "q",
+ ],
+ "Turkish": [
+ "a",
+ "e",
+ "i",
+ "n",
+ "r",
+ "l",
+ "ı",
+ "k",
+ "d",
+ "t",
+ "s",
+ "m",
+ "y",
+ "u",
+ "o",
+ "b",
+ "ü",
+ "ş",
+ "v",
+ "g",
+ "z",
+ "h",
+ "c",
+ "p",
+ "ç",
+ "ğ",
+ ],
+ "Romanian": [
+ "e",
+ "i",
+ "a",
+ "r",
+ "n",
+ "t",
+ "u",
+ "l",
+ "o",
+ "c",
+ "s",
+ "d",
+ "p",
+ "m",
+ "ă",
+ "f",
+ "v",
+ "î",
+ "g",
+ "b",
+ "ș",
+ "ț",
+ "z",
+ "h",
+ "â",
+ "j",
+ ],
+ "Farsi": [
+ "ا",
+ "ی",
+ "ر",
+ "د",
+ "ن",
+ "ه",
+ "و",
+ "م",
+ "ت",
+ "ب",
+ "س",
+ "ل",
+ "ک",
+ "ش",
+ "ز",
+ "ف",
+ "گ",
+ "ع",
+ "خ",
+ "ق",
+ "ج",
+ "آ",
+ "پ",
+ "ح",
+ "ط",
+ "ص",
+ ],
+ "Arabic": [
+ "ا",
+ "ل",
+ "ي",
+ "م",
+ "و",
+ "ن",
+ "ر",
+ "ت",
+ "ب",
+ "ة",
+ "ع",
+ "د",
+ "س",
+ "ف",
+ "ه",
+ "ك",
+ "ق",
+ "أ",
+ "ح",
+ "ج",
+ "ش",
+ "ط",
+ "ص",
+ "ى",
+ "خ",
+ "إ",
+ ],
+ "Danish": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "i",
+ "s",
+ "d",
+ "l",
+ "o",
+ "g",
+ "m",
+ "k",
+ "f",
+ "v",
+ "u",
+ "b",
+ "h",
+ "p",
+ "å",
+ "y",
+ "ø",
+ "æ",
+ "c",
+ "j",
+ "w",
+ ],
+ "Serbian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "р",
+ "с",
+ "у",
+ "т",
+ "к",
+ "ј",
+ "в",
+ "д",
+ "м",
+ "п",
+ "л",
+ "г",
+ "з",
+ "б",
+ "a",
+ "i",
+ "e",
+ "o",
+ "n",
+ "ц",
+ "ш",
+ ],
+ "Lithuanian": [
+ "i",
+ "a",
+ "s",
+ "o",
+ "r",
+ "e",
+ "t",
+ "n",
+ "u",
+ "k",
+ "m",
+ "l",
+ "p",
+ "v",
+ "d",
+ "j",
+ "g",
+ "ė",
+ "b",
+ "y",
+ "ų",
+ "š",
+ "ž",
+ "c",
+ "ą",
+ "į",
+ ],
+ "Slovene": [
+ "e",
+ "a",
+ "i",
+ "o",
+ "n",
+ "r",
+ "s",
+ "l",
+ "t",
+ "j",
+ "v",
+ "k",
+ "d",
+ "p",
+ "m",
+ "u",
+ "z",
+ "b",
+ "g",
+ "h",
+ "č",
+ "c",
+ "š",
+ "ž",
+ "f",
+ "y",
+ ],
+ "Slovak": [
+ "o",
+ "a",
+ "e",
+ "n",
+ "i",
+ "r",
+ "v",
+ "t",
+ "s",
+ "l",
+ "k",
+ "d",
+ "m",
+ "p",
+ "u",
+ "c",
+ "h",
+ "j",
+ "b",
+ "z",
+ "á",
+ "y",
+ "ý",
+ "í",
+ "č",
+ "é",
+ ],
+ "Hebrew": [
+ "י",
+ "ו",
+ "ה",
+ "ל",
+ "ר",
+ "ב",
+ "ת",
+ "מ",
+ "א",
+ "ש",
+ "נ",
+ "ע",
+ "ם",
+ "ד",
+ "ק",
+ "ח",
+ "פ",
+ "ס",
+ "כ",
+ "ג",
+ "ט",
+ "צ",
+ "ן",
+ "ז",
+ "ך",
+ ],
+ "Bulgarian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "т",
+ "р",
+ "с",
+ "в",
+ "л",
+ "к",
+ "д",
+ "п",
+ "м",
+ "з",
+ "г",
+ "я",
+ "ъ",
+ "у",
+ "б",
+ "ч",
+ "ц",
+ "й",
+ "ж",
+ "щ",
+ "х",
+ ],
+ "Croatian": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "j",
+ "s",
+ "t",
+ "u",
+ "k",
+ "l",
+ "v",
+ "d",
+ "m",
+ "p",
+ "g",
+ "z",
+ "b",
+ "c",
+ "č",
+ "h",
+ "š",
+ "ž",
+ "ć",
+ "f",
+ ],
+ "Hindi": [
+ "क",
+ "र",
+ "स",
+ "न",
+ "त",
+ "म",
+ "ह",
+ "प",
+ "य",
+ "ल",
+ "व",
+ "ज",
+ "द",
+ "ग",
+ "ब",
+ "श",
+ "ट",
+ "अ",
+ "ए",
+ "थ",
+ "भ",
+ "ड",
+ "च",
+ "ध",
+ "ष",
+ "इ",
+ ],
+ "Estonian": [
+ "a",
+ "i",
+ "e",
+ "s",
+ "t",
+ "l",
+ "u",
+ "n",
+ "o",
+ "k",
+ "r",
+ "d",
+ "m",
+ "v",
+ "g",
+ "p",
+ "j",
+ "h",
+ "ä",
+ "b",
+ "õ",
+ "ü",
+ "f",
+ "c",
+ "ö",
+ "y",
+ ],
+ "Thai": [
+ "า",
+ "น",
+ "ร",
+ "อ",
+ "ก",
+ "เ",
+ "ง",
+ "ม",
+ "ย",
+ "ล",
+ "ว",
+ "ด",
+ "ท",
+ "ส",
+ "ต",
+ "ะ",
+ "ป",
+ "บ",
+ "ค",
+ "ห",
+ "แ",
+ "จ",
+ "พ",
+ "ช",
+ "ข",
+ "ใ",
+ ],
+ "Greek": [
+ "α",
+ "τ",
+ "ο",
+ "ι",
+ "ε",
+ "ν",
+ "ρ",
+ "σ",
+ "κ",
+ "η",
+ "π",
+ "ς",
+ "υ",
+ "μ",
+ "λ",
+ "ί",
+ "ό",
+ "ά",
+ "γ",
+ "έ",
+ "δ",
+ "ή",
+ "ω",
+ "χ",
+ "θ",
+ "ύ",
+ ],
+ "Tamil": [
+ "க",
+ "த",
+ "ப",
+ "ட",
+ "ர",
+ "ம",
+ "ல",
+ "ன",
+ "வ",
+ "ற",
+ "ய",
+ "ள",
+ "ச",
+ "ந",
+ "இ",
+ "ண",
+ "அ",
+ "ஆ",
+ "ழ",
+ "ங",
+ "எ",
+ "உ",
+ "ஒ",
+ "ஸ",
+ ],
+ "Kazakh": [
+ "а",
+ "ы",
+ "е",
+ "н",
+ "т",
+ "р",
+ "л",
+ "і",
+ "д",
+ "с",
+ "м",
+ "қ",
+ "к",
+ "о",
+ "б",
+ "и",
+ "у",
+ "ғ",
+ "ж",
+ "ң",
+ "з",
+ "ш",
+ "й",
+ "п",
+ "г",
+ "ө",
+ ],
+}
+
+LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/legacy.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/legacy.py"
new file mode 100644
index 0000000..360a310
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/legacy.py"
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+from warnings import warn
+
+from .api import from_bytes
+from .constant import CHARDET_CORRESPONDENCE, TOO_SMALL_SEQUENCE
+
# TODO: remove this check when dropping Python 3.7 support
if TYPE_CHECKING:
    from typing_extensions import TypedDict

    # Shape of the dict returned by detect(); defined only for type
    # checkers, so typing_extensions is presumably not required at
    # runtime — confirm against packaging metadata.
    class ResultDict(TypedDict):
        encoding: str | None
        language: str
        confidence: float | None
+
+
def detect(
    byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
) -> ResultDict:
    """
    chardet legacy method
    Detect the encoding of the given byte string. It should be mostly backward-compatible.
    Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
    This function is deprecated and should be used to migrate your project easily, consult the documentation for
    further information. Not planned for removal.

    :param byte_str: The byte sequence to examine.
    :param should_rename_legacy: Should we rename legacy encodings
        to their more modern equivalents?
    :raises TypeError: If byte_str is neither bytes nor bytearray.
    :return: Dict with "encoding", "language" and "confidence" keys,
        mirroring chardet's return shape.
    """
    # Extra keyword arguments are accepted for chardet API compatibility
    # but have no effect; warn so callers notice.
    if kwargs:
        warn(
            f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()"
        )

    if not isinstance(byte_str, (bytearray, bytes)):
        raise TypeError(  # pragma: nocover
            f"Expected object of type bytes or bytearray, got: {type(byte_str)}"
        )

    if isinstance(byte_str, bytearray):
        byte_str = bytes(byte_str)

    r = from_bytes(byte_str).best()

    encoding = r.encoding if r is not None else None
    language = r.language if r is not None and r.language != "Unknown" else ""
    confidence = 1.0 - r.chaos if r is not None else None

    # automatically lower confidence
    # on small bytes samples.
    # https://github.com/jawah/charset_normalizer/issues/391
    if (
        confidence is not None
        and confidence >= 0.9
        and encoding
        not in {
            "utf_8",
            "ascii",
        }
        and r.bom is False  # type: ignore[union-attr]
        and len(byte_str) < TOO_SMALL_SEQUENCE
    ):
        confidence -= 0.2

    # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process
    # but chardet does return 'utf-8-sig' and it is a valid codec name.
    if r is not None and encoding == "utf_8" and r.bom:
        encoding += "_sig"

    # Map python codec names onto chardet's historical spelling unless the
    # caller opted out. `not should_rename_legacy` (instead of the previous
    # `is False` identity test) also treats falsy non-bool values consistently.
    if not should_rename_legacy and encoding in CHARDET_CORRESPONDENCE:
        encoding = CHARDET_CORRESPONDENCE[encoding]

    return {
        "encoding": encoding,
        "language": language,
        "confidence": confidence,
    }
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..047d0bc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.py"
new file mode 100644
index 0000000..12ce024
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md.py"
@@ -0,0 +1,635 @@
+from __future__ import annotations
+
+from functools import lru_cache
+from logging import getLogger
+
+from .constant import (
+ COMMON_SAFE_ASCII_CHARACTERS,
+ TRACE,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+)
+from .utils import (
+ is_accentuated,
+ is_arabic,
+ is_arabic_isolated_form,
+ is_case_variable,
+ is_cjk,
+ is_emoticon,
+ is_hangul,
+ is_hiragana,
+ is_katakana,
+ is_latin,
+ is_punctuation,
+ is_separator,
+ is_symbol,
+ is_thai,
+ is_unprintable,
+ remove_accent,
+ unicode_range,
+ is_cjk_uncommon,
+)
+
+
class MessDetectorPlugin:
    """
    Base abstract class used for mess detection plugins.
    All detectors MUST extend and implement given methods.
    """

    def eligible(self, character: str) -> bool:
        """
        Determine if given character should be fed in.
        """
        raise NotImplementedError  # pragma: nocover

    def feed(self, character: str) -> None:
        """
        The main routine to be executed upon character.
        Insert the logic in which the text would be considered chaotic.
        """
        raise NotImplementedError  # pragma: nocover

    def reset(self) -> None:  # pragma: no cover
        """
        Permit to reset the plugin to the initial state.
        """
        raise NotImplementedError

    @property
    def ratio(self) -> float:
        """
        Compute the chaos ratio based on what your feed() has seen.
        Must NOT be lower than 0.; No restriction gt 0.
        """
        raise NotImplementedError  # pragma: nocover
+
+
class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin):
    """Flag printable streams dominated by punctuation or symbols.

    Immediate repeats of the same character are counted once, and a small
    set of ASCII characters common in markup is ignored entirely.
    """

    def __init__(self) -> None:
        self._punctuation_count: int = 0
        self._symbol_count: int = 0
        self._character_count: int = 0

        # Last printable character fed, used to skip immediate repeats.
        # NOTE(review): an unused `_frenzy_symbol_in_word` flag from the
        # original was dead code and has been removed.
        self._last_printable_char: str | None = None

    def eligible(self, character: str) -> bool:
        return character.isprintable()

    def feed(self, character: str) -> None:
        self._character_count += 1

        if (
            character != self._last_printable_char
            and character not in COMMON_SAFE_ASCII_CHARACTERS
        ):
            if is_punctuation(character):
                self._punctuation_count += 1
            elif (
                not character.isdigit()
                and is_symbol(character)
                and not is_emoticon(character)
            ):
                # Symbols weigh double compared to punctuation.
                self._symbol_count += 2

        self._last_printable_char = character

    def reset(self) -> None:  # Abstract
        # NOTE(review): `_last_printable_char` is deliberately left
        # untouched to match the original behavior — confirm whether a
        # full reset is desired.
        self._punctuation_count = 0
        self._character_count = 0
        self._symbol_count = 0

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0

        ratio_of_punctuation: float = (
            self._punctuation_count + self._symbol_count
        ) / self._character_count

        # Below a 30% density the text is considered clean (contributes 0).
        return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0
+
+
class TooManyAccentuatedPlugin(MessDetectorPlugin):
    """Flag alphabetic streams in which accentuated letters dominate."""

    def __init__(self) -> None:
        # Letters seen so far / how many of them carried an accent.
        self._character_count: int = 0
        self._accentuated_count: int = 0

    def eligible(self, character: str) -> bool:
        # Only alphabetic characters are meaningful for this detector.
        return character.isalpha()

    def feed(self, character: str) -> None:
        self._character_count += 1
        self._accentuated_count += 1 if is_accentuated(character) else 0

    def reset(self) -> None:  # Abstract
        self._character_count = 0
        self._accentuated_count = 0

    @property
    def ratio(self) -> float:
        # Fewer than 8 letters: not enough evidence to judge.
        if self._character_count < 8:
            return 0.0

        accent_density: float = self._accentuated_count / self._character_count
        # Under 35% accentuation the stream is considered unremarkable.
        return accent_density if accent_density >= 0.35 else 0.0
+
+
class UnprintablePlugin(MessDetectorPlugin):
    """Heavily penalize characters classified as unprintable."""

    def __init__(self) -> None:
        self._unprintable_count: int = 0
        self._character_count: int = 0

    def eligible(self, character: str) -> bool:
        # Every character is inspected, printable or not.
        return True

    def feed(self, character: str) -> None:
        self._character_count += 1
        if is_unprintable(character):
            self._unprintable_count += 1

    def reset(self) -> None:  # Abstract
        # NOTE(review): only the unprintable tally is cleared here, exactly
        # as in the original implementation (`_character_count` is kept).
        self._unprintable_count = 0

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0
        # Each unprintable character weighs 8x in the chaos ratio.
        return (self._unprintable_count * 8) / self._character_count
+
+
class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin):
    """Count suspicious successions of accentuated latin letters.

    Successive accentuated upper-case letters, or the same base letter
    repeated with different accents, raise the chaos score.
    """

    def __init__(self) -> None:
        self._successive_count: int = 0
        self._character_count: int = 0

        # Previous latin letter fed; comparisons below run against it BEFORE
        # it is replaced by the current character (ordering is load-bearing).
        self._last_latin_character: str | None = None

    def eligible(self, character: str) -> bool:
        return character.isalpha() and is_latin(character)

    def feed(self, character: str) -> None:
        self._character_count += 1
        if (
            self._last_latin_character is not None
            and is_accentuated(character)
            and is_accentuated(self._last_latin_character)
        ):
            # Two accentuated upper-case letters in a row is suspicious.
            if character.isupper() and self._last_latin_character.isupper():
                self._successive_count += 1
            # Worse if its the same char duplicated with different accent.
            if remove_accent(character) == remove_accent(self._last_latin_character):
                self._successive_count += 1
        self._last_latin_character = character

    def reset(self) -> None:  # Abstract
        self._successive_count = 0
        self._character_count = 0
        self._last_latin_character = None

    @property
    def ratio(self) -> float:
        if self._character_count == 0:
            return 0.0

        # Each suspicious pair weighs double.
        return (self._successive_count * 2) / self._character_count
+
+
class SuspiciousRange(MessDetectorPlugin):
    """Count implausible Unicode-range switches between consecutive characters."""

    def __init__(self) -> None:
        self._suspicious_successive_range_count: int = 0
        self._character_count: int = 0
        self._last_printable_seen: str | None = None

    def eligible(self, character: str) -> bool:
        return character.isprintable()

    def feed(self, character: str) -> None:
        self._character_count += 1

        # Whitespace, punctuation and common markup characters legitimately
        # separate words: they break the comparison chain instead of being
        # compared themselves.
        if (
            character.isspace()
            or is_punctuation(character)
            or character in COMMON_SAFE_ASCII_CHARACTERS
        ):
            self._last_printable_seen = None
            return

        if self._last_printable_seen is None:
            self._last_printable_seen = character
            return

        unicode_range_a: str | None = unicode_range(self._last_printable_seen)
        unicode_range_b: str | None = unicode_range(character)

        # NOTE(review): is_suspiciously_successive_range is presumably
        # defined elsewhere in this module — not visible in this chunk.
        if is_suspiciously_successive_range(unicode_range_a, unicode_range_b):
            self._suspicious_successive_range_count += 1

        self._last_printable_seen = character

    def reset(self) -> None:  # Abstract
        self._character_count = 0
        self._suspicious_successive_range_count = 0
        self._last_printable_seen = None

    @property
    def ratio(self) -> float:
        # 13 characters or fewer: not enough evidence to judge.
        if self._character_count <= 13:
            return 0.0

        # Each suspicious switch weighs double.
        ratio_of_suspicious_range_usage: float = (
            self._suspicious_successive_range_count * 2
        ) / self._character_count

        return ratio_of_suspicious_range_usage
+
+
class SuperWeirdWordPlugin(MessDetectorPlugin):
    """Flag words whose letter composition looks wrong.

    Heuristics applied per word (a word = a run of alphabetic characters):
    accent-heavy words, a single CJK/Hangul/Kana/Thai glyph embedded in an
    otherwise alphabetic word, symbols glued into a word, and very long runs
    of non-Latin or accentuated letters that do not look camelCased.
    """

    def __init__(self) -> None:
        # Words seen / words judged bad so far.
        self._word_count: int = 0
        self._bad_word_count: int = 0
        # Suspicious "long foreign word" events; also a trigger in ratio.
        self._foreign_long_count: int = 0

        self._is_current_word_bad: bool = False
        # Set once the current word contains a non-Latin or accentuated letter
        # outside the CJK/Hangul/Kana/Thai scripts.
        self._foreign_long_watch: bool = False

        self._character_count: int = 0
        self._bad_character_count: int = 0

        # Letters of the word currently being accumulated.
        self._buffer: str = ""
        self._buffer_accent_count: int = 0
        # CJK/Hangul/Katakana/Hiragana/Thai glyphs inside the buffer.
        self._buffer_glyph_count: int = 0

    def eligible(self, character: str) -> bool:
        # Every character matters: letters build words, anything else ends them.
        return True

    def feed(self, character: str) -> None:
        """Accumulate letters into a word buffer; judge the word when it ends."""
        if character.isalpha():
            self._buffer += character
            if is_accentuated(character):
                self._buffer_accent_count += 1
            # Arm the "foreign long word" watch on the first letter that is
            # neither plain Latin nor a recognized Asian script.
            if (
                self._foreign_long_watch is False
                and (is_latin(character) is False or is_accentuated(character))
                and is_cjk(character) is False
                and is_hangul(character) is False
                and is_katakana(character) is False
                and is_hiragana(character) is False
                and is_thai(character) is False
            ):
                self._foreign_long_watch = True
            if (
                is_cjk(character)
                or is_hangul(character)
                or is_katakana(character)
                or is_hiragana(character)
                or is_thai(character)
            ):
                self._buffer_glyph_count += 1
            return
        if not self._buffer:
            return
        # A separator terminates the current word: evaluate it.
        if (
            character.isspace() or is_punctuation(character) or is_separator(character)
        ) and self._buffer:
            self._word_count += 1
            buffer_length: int = len(self._buffer)

            self._character_count += buffer_length

            if buffer_length >= 4:
                # Half or more accentuated letters is abnormal for real text.
                if self._buffer_accent_count / buffer_length >= 0.5:
                    self._is_current_word_bad = True
                # Word/Buffer ending with an upper case accentuated letter are so rare,
                # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
                elif (
                    is_accentuated(self._buffer[-1])
                    and self._buffer[-1].isupper()
                    and all(_.isupper() for _ in self._buffer) is False
                ):
                    self._foreign_long_count += 1
                    self._is_current_word_bad = True
                # A lone Asian glyph inside an alphabetic word is suspicious.
                elif self._buffer_glyph_count == 1:
                    self._is_current_word_bad = True
                    self._foreign_long_count += 1
            if buffer_length >= 24 and self._foreign_long_watch:
                # Indices of upper-case letters; sparse uppercase suggests
                # a legitimate camelCased identifier rather than garbage.
                camel_case_dst = [
                    i
                    for c, i in zip(self._buffer, range(0, buffer_length))
                    if c.isupper()
                ]
                probable_camel_cased: bool = False

                if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3):
                    probable_camel_cased = True

                if not probable_camel_cased:
                    self._foreign_long_count += 1
                    self._is_current_word_bad = True

            if self._is_current_word_bad:
                self._bad_word_count += 1
                self._bad_character_count += len(self._buffer)
                self._is_current_word_bad = False

            # Start a fresh word.
            self._foreign_long_watch = False
            self._buffer = ""
            self._buffer_accent_count = 0
            self._buffer_glyph_count = 0
        # A symbol glued into a word (not a common ASCII connector, not a
        # digit) taints the word and stays part of the buffer.
        elif (
            character not in {"<", ">", "-", "=", "~", "|", "_"}
            and character.isdigit() is False
            and is_symbol(character)
        ):
            self._is_current_word_bad = True
            self._buffer += character

    def reset(self) -> None:  # Abstract
        self._buffer = ""
        self._is_current_word_bad = False
        self._foreign_long_watch = False
        self._bad_word_count = 0
        self._word_count = 0
        self._character_count = 0
        self._bad_character_count = 0
        self._foreign_long_count = 0

    @property
    def ratio(self) -> float:
        """Share of characters belonging to bad words; 0.0 when too few words."""
        if self._word_count <= 10 and self._foreign_long_count == 0:
            return 0.0

        return self._bad_character_count / self._character_count
+
+
class CjkUncommonPlugin(MessDetectorPlugin):
    """Flag CJK runs dominated by characters outside the common set.

    A high share of uncommon ideographs usually indicates mojibake rather than
    genuine text; moderate shares (e.g. traditional Chinese) stay below the
    threshold and contribute nothing.
    """

    def __init__(self) -> None:
        self._character_count: int = 0
        self._uncommon_count: int = 0

    def eligible(self, character: str) -> bool:
        # Only CJK ideographs are relevant to this detector.
        return is_cjk(character)

    def feed(self, character: str) -> None:
        self._character_count += 1
        if is_cjk_uncommon(character):
            self._uncommon_count += 1

    def reset(self) -> None:  # Abstract
        self._uncommon_count = 0
        self._character_count = 0

    @property
    def ratio(self) -> float:
        # Fewer than 8 ideographs is not enough evidence either way.
        if self._character_count < 8:
            return 0.0

        uncommon_share: float = self._uncommon_count / self._character_count

        # we can be pretty sure it's garbage when uncommon characters are widely
        # used. otherwise it could just be traditional chinese for example.
        return uncommon_share / 10 if uncommon_share > 0.5 else 0.0
+
+
class ArchaicUpperLowerPlugin(MessDetectorPlugin):
    """Detect unnatural uPpEr/lOwEr case alternation inside words.

    Counts case flips between consecutive cased letters within chunks
    delimited by non-cased characters; only non-ASCII-only chunks of a
    bounded length contribute to the final score.
    """

    def __init__(self) -> None:
        # Pending flip flag: one flip is tolerated, a second one in a row scores.
        self._buf: bool = False

        self._character_count_since_last_sep: int = 0

        # Flips accumulated in the current chunk / committed across chunks.
        self._successive_upper_lower_count: int = 0
        self._successive_upper_lower_count_final: int = 0

        self._character_count: int = 0

        self._last_alpha_seen: str | None = None
        # True while the current chunk contains only ASCII characters.
        self._current_ascii_only: bool = True

    def eligible(self, character: str) -> bool:
        return True

    def feed(self, character: str) -> None:
        is_concerned = character.isalpha() and is_case_variable(character)
        # Any non-cased character ends the current chunk.
        chunk_sep = is_concerned is False

        if chunk_sep and self._character_count_since_last_sep > 0:
            # Commit the chunk's flip count only for short-enough, non-ASCII
            # chunks that are not terminated by a digit.
            if (
                self._character_count_since_last_sep <= 64
                and character.isdigit() is False
                and self._current_ascii_only is False
            ):
                self._successive_upper_lower_count_final += (
                    self._successive_upper_lower_count
                )

            # Reset per-chunk state; the separator itself still counts as a character.
            self._successive_upper_lower_count = 0
            self._character_count_since_last_sep = 0
            self._last_alpha_seen = None
            self._buf = False
            self._character_count += 1
            self._current_ascii_only = True

            return

        if self._current_ascii_only is True and character.isascii() is False:
            self._current_ascii_only = False

        if self._last_alpha_seen is not None:
            if (character.isupper() and self._last_alpha_seen.islower()) or (
                character.islower() and self._last_alpha_seen.isupper()
            ):
                # Second flip in a row: score both and clear the pending flag.
                if self._buf is True:
                    self._successive_upper_lower_count += 2
                    self._buf = False
                else:
                    self._buf = True
            else:
                self._buf = False

        self._character_count += 1
        self._character_count_since_last_sep += 1
        self._last_alpha_seen = character

    def reset(self) -> None:  # Abstract
        self._character_count = 0
        self._character_count_since_last_sep = 0
        self._successive_upper_lower_count = 0
        self._successive_upper_lower_count_final = 0
        self._last_alpha_seen = None
        self._buf = False
        self._current_ascii_only = True

    @property
    def ratio(self) -> float:
        """Share of characters involved in committed case-flip sequences."""
        if self._character_count == 0:
            return 0.0

        return self._successive_upper_lower_count_final / self._character_count
+
+
class ArabicIsolatedFormPlugin(MessDetectorPlugin):
    """Flag Arabic text written mostly with isolated presentation forms.

    Proper Arabic text uses contextual (joined) letter forms; a large share of
    isolated forms is a strong mojibake signal.
    """

    def __init__(self) -> None:
        self._character_count: int = 0
        self._isolated_form_count: int = 0

    def reset(self) -> None:  # Abstract
        self._isolated_form_count = 0
        self._character_count = 0

    def eligible(self, character: str) -> bool:
        # Only Arabic characters are relevant here.
        return is_arabic(character)

    def feed(self, character: str) -> None:
        self._character_count += 1
        if is_arabic_isolated_form(character):
            self._isolated_form_count += 1

    @property
    def ratio(self) -> float:
        # Fewer than 8 Arabic characters is not enough evidence either way.
        if self._character_count < 8:
            return 0.0

        return self._isolated_form_count / self._character_count
+
+
@lru_cache(maxsize=1024)
def is_suspiciously_successive_range(
    unicode_range_a: str | None, unicode_range_b: str | None
) -> bool:
    """
    Determine if two Unicode range seen next to each other can be considered as suspicious.

    Returns False (not suspicious) for known-legitimate combinations:
    same range, Latin variants, emoticons, Latin + combining marks, ranges
    sharing a primary keyword, Japanese scripts mixed with CJK, Hangul/CJK
    mixed with Basic Latin, and CJK/Kana next to punctuation or form ranges.
    Everything else — including an unknown (None) range — is suspicious.
    """
    if unicode_range_a is None or unicode_range_b is None:
        return True

    if unicode_range_a == unicode_range_b:
        return False

    if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
        return False

    if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
        return False

    # Latin characters can be accompanied with a combining diacritical mark
    # eg. Vietnamese.
    if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and (
        "Combining" in unicode_range_a or "Combining" in unicode_range_b
    ):
        return False

    keywords_range_a, keywords_range_b = (
        unicode_range_a.split(" "),
        unicode_range_b.split(" "),
    )

    # Ranges sharing a primary (non-secondary) keyword are related, e.g.
    # "Arabic" and "Arabic Supplement".
    for el in keywords_range_a:
        if el in UNICODE_SECONDARY_RANGE_KEYWORD:
            continue
        if el in keywords_range_b:
            return False

    # Japanese Exception
    range_a_jp_chars, range_b_jp_chars = (
        unicode_range_a
        in (
            "Hiragana",
            "Katakana",
        ),
        unicode_range_b in ("Hiragana", "Katakana"),
    )
    if (range_a_jp_chars or range_b_jp_chars) and (
        "CJK" in unicode_range_a or "CJK" in unicode_range_b
    ):
        return False
    if range_a_jp_chars and range_b_jp_chars:
        return False

    if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
        if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
            return False
        if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
            return False

    # Chinese/Japanese use dedicated range for punctuation and/or separators.
    if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
        unicode_range_a in ["Katakana", "Hiragana"]
        and unicode_range_b in ["Katakana", "Hiragana"]
    ):
        if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
            return False
        if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
            return False
        if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
            return False

    return True
+
+
@lru_cache(maxsize=2048)
def mess_ratio(
    decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
) -> float:
    """
    Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier.
    """

    # One fresh instance of every registered detector plugin.
    plugins: list[MessDetectorPlugin] = [
        md_class() for md_class in MessDetectorPlugin.__subclasses__()
    ]

    length: int = len(decoded_sequence) + 1

    mean_mess_ratio: float = 0.0

    # Re-evaluate the accumulated ratio every N characters; larger inputs are
    # sampled less often to keep the scan cheap.
    if length < 512:
        intermediary_mean_mess_ratio_calc: int = 32
    elif length <= 1024:
        intermediary_mean_mess_ratio_calc = 64
    else:
        intermediary_mean_mess_ratio_calc = 128

    # The appended newline guarantees word/chunk detectors flush their buffers.
    for index, character in enumerate(decoded_sequence + "\n"):
        for plugin in plugins:
            if plugin.eligible(character):
                plugin.feed(character)

        if (
            index > 0 and index % intermediary_mean_mess_ratio_calc == 0
        ) or index == length - 1:
            mean_mess_ratio = sum(dt.ratio for dt in plugins)

            # Early exit: already messy enough, no need to scan further.
            if mean_mess_ratio >= maximum_threshold:
                break

    if debug:
        logger = getLogger("charset_normalizer")

        logger.log(
            TRACE,
            "Mess-detector extended-analysis start. "
            f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} "
            f"maximum_threshold={maximum_threshold}",
        )

        if len(decoded_sequence) > 16:
            logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}")
            logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}")

        for dt in plugins:
            logger.log(TRACE, f"{dt.__class__}: {dt.ratio}")

    return round(mean_mess_ratio, 3)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md__mypyc.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md__mypyc.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..c2c489e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/md__mypyc.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/models.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/models.py"
new file mode 100644
index 0000000..1042758
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/models.py"
@@ -0,0 +1,360 @@
+from __future__ import annotations
+
+from encodings.aliases import aliases
+from hashlib import sha256
+from json import dumps
+from re import sub
+from typing import Any, Iterator, List, Tuple
+
+from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
+from .utils import iana_name, is_multi_byte_encoding, unicode_range
+
+
class CharsetMatch:
    """One candidate decoding of a byte payload.

    Holds the guessed encoding, its mess (chaos) and coherence scores, any
    equivalent encodings as submatches, and lazily computed derived data
    (decoded string, unicode ranges, re-encoded output, fingerprint).
    """

    def __init__(
        self,
        payload: bytes,
        guessed_encoding: str,
        mean_mess_ratio: float,
        has_sig_or_bom: bool,
        languages: CoherenceMatches,
        decoded_payload: str | None = None,
        preemptive_declaration: str | None = None,
    ):
        self._payload: bytes = payload

        self._encoding: str = guessed_encoding
        self._mean_mess_ratio: float = mean_mess_ratio
        self._languages: CoherenceMatches = languages
        self._has_sig_or_bom: bool = has_sig_or_bom
        # Lazily computed by the 'alphabets' property.
        self._unicode_ranges: list[str] | None = None

        # Equivalent encodings that decode to the exact same string.
        self._leaves: list[CharsetMatch] = []
        self._mean_coherence_ratio: float = 0.0

        # Cache for the last output() call.
        self._output_payload: bytes | None = None
        self._output_encoding: str | None = None

        # Decoded string cache; filled lazily by __str__ when not provided.
        self._string: str | None = decoded_payload

        # Encoding declared inside the document itself (e.g. XML/HTML header).
        self._preemptive_declaration: str | None = preemptive_declaration

    def __eq__(self, other: object) -> bool:
        # NOTE(review): when 'other' is a str naming an unknown encoding,
        # iana_name (strict mode) raises ValueError instead of returning
        # False — confirm this is intended before relying on it.
        if not isinstance(other, CharsetMatch):
            if isinstance(other, str):
                return iana_name(other) == self.encoding
            return False
        return self.encoding == other.encoding and self.fingerprint == other.fingerprint

    def __lt__(self, other: object) -> bool:
        """
        Implemented to make sorted available upon CharsetMatches items.
        """
        if not isinstance(other, CharsetMatch):
            raise ValueError

        chaos_difference: float = abs(self.chaos - other.chaos)
        coherence_difference: float = abs(self.coherence - other.coherence)

        # Below 1% difference --> Use Coherence
        if chaos_difference < 0.01 and coherence_difference > 0.02:
            return self.coherence > other.coherence
        elif chaos_difference < 0.01 and coherence_difference <= 0.02:
            # When having a difficult decision, use the result that decoded as many multi-byte as possible.
            # preserve RAM usage!
            if len(self._payload) >= TOO_BIG_SEQUENCE:
                return self.chaos < other.chaos
            return self.multi_byte_usage > other.multi_byte_usage

        return self.chaos < other.chaos

    @property
    def multi_byte_usage(self) -> float:
        # Fraction of bytes consumed by multi-byte sequences: 0.0 means every
        # character was a single byte.
        return 1.0 - (len(str(self)) / len(self.raw))

    def __str__(self) -> str:
        # Lazy Str Loading
        if self._string is None:
            self._string = str(self._payload, self._encoding, "strict")
        return self._string

    def __repr__(self) -> str:
        return f"<CharsetMatch '{self.encoding}' bytes({self.fingerprint})>"

    def add_submatch(self, other: CharsetMatch) -> None:
        """Register another encoding that produces the same decoded output."""
        if not isinstance(other, CharsetMatch) or other == self:
            raise ValueError(
                "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
                    other.__class__
                )
            )

        other._string = None  # Unload RAM usage; dirty trick.
        self._leaves.append(other)

    @property
    def encoding(self) -> str:
        # Python-normalized codec name of this guess.
        return self._encoding

    @property
    def encoding_aliases(self) -> list[str]:
        """
        Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855.
        """
        also_known_as: list[str] = []
        for u, p in aliases.items():
            if self.encoding == u:
                also_known_as.append(p)
            elif self.encoding == p:
                also_known_as.append(u)
        return also_known_as

    @property
    def bom(self) -> bool:
        # True when a BOM or encoding signature was present in the payload.
        return self._has_sig_or_bom

    @property
    def byte_order_mark(self) -> bool:
        # Alias of 'bom'.
        return self._has_sig_or_bom

    @property
    def languages(self) -> list[str]:
        """
        Return the complete list of possible languages found in decoded sequence.
        Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'.
        """
        return [e[0] for e in self._languages]

    @property
    def language(self) -> str:
        """
        Most probable language found in decoded sequence. If none were detected or inferred, the property will return
        "Unknown".
        """
        if not self._languages:
            # Trying to infer the language based on the given encoding
            # Its either English or we should not pronounce ourselves in certain cases.
            if "ascii" in self.could_be_from_charset:
                return "English"

            # doing it there to avoid circular import
            from charset_normalizer.cd import encoding_languages, mb_encoding_languages

            languages = (
                mb_encoding_languages(self.encoding)
                if is_multi_byte_encoding(self.encoding)
                else encoding_languages(self.encoding)
            )

            if len(languages) == 0 or "Latin Based" in languages:
                return "Unknown"

            return languages[0]

        return self._languages[0][0]

    @property
    def chaos(self) -> float:
        # Mess ratio: lower is better.
        return self._mean_mess_ratio

    @property
    def coherence(self) -> float:
        # Best language confidence: higher is better; 0.0 when none detected.
        if not self._languages:
            return 0.0
        return self._languages[0][1]

    @property
    def percent_chaos(self) -> float:
        return round(self.chaos * 100, ndigits=3)

    @property
    def percent_coherence(self) -> float:
        return round(self.coherence * 100, ndigits=3)

    @property
    def raw(self) -> bytes:
        """
        Original untouched bytes.
        """
        return self._payload

    @property
    def submatch(self) -> list[CharsetMatch]:
        return self._leaves

    @property
    def has_submatch(self) -> bool:
        return len(self._leaves) > 0

    @property
    def alphabets(self) -> list[str]:
        # Sorted unique Unicode range names present in the decoded string;
        # computed once then cached.
        if self._unicode_ranges is not None:
            return self._unicode_ranges
        # list detected ranges
        detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)]
        # filter and sort
        self._unicode_ranges = sorted(list({r for r in detected_ranges if r}))
        return self._unicode_ranges

    @property
    def could_be_from_charset(self) -> list[str]:
        """
        The complete list of encoding that output the exact SAME str result and therefore could be the originating
        encoding.
        This list does include the encoding available in property 'encoding'.
        """
        return [self._encoding] + [m.encoding for m in self._leaves]

    def output(self, encoding: str = "utf_8") -> bytes:
        """
        Method to get re-encoded bytes payload using given target encoding. Default to UTF-8.
        Any errors will be simply ignored by the encoder NOT replaced.
        """
        if self._output_encoding is None or self._output_encoding != encoding:
            self._output_encoding = encoding
            decoded_string = str(self)
            # When the document declared its own (non-UTF-8) encoding, rewrite
            # that declaration so the emitted bytes stay self-consistent.
            if (
                self._preemptive_declaration is not None
                and self._preemptive_declaration.lower()
                not in ["utf-8", "utf8", "utf_8"]
            ):
                patched_header = sub(
                    RE_POSSIBLE_ENCODING_INDICATION,
                    lambda m: m.string[m.span()[0] : m.span()[1]].replace(
                        m.groups()[0],
                        iana_name(self._output_encoding).replace("_", "-"),  # type: ignore[arg-type]
                    ),
                    decoded_string[:8192],
                    count=1,
                )

                decoded_string = patched_header + decoded_string[8192:]

            self._output_payload = decoded_string.encode(encoding, "replace")

        return self._output_payload  # type: ignore

    @property
    def fingerprint(self) -> str:
        """
        Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one.
        """
        return sha256(self.output()).hexdigest()
+
+
class CharsetMatches:
    """
    Container with every CharsetMatch items ordered by default from most probable to the less one.
    Act like a list(iterable) but does not implements all related methods.
    """

    def __init__(self, results: list[CharsetMatch] | None = None):
        # Kept sorted (via CharsetMatch.__lt__) at all times.
        self._results: list[CharsetMatch] = sorted(results) if results else []

    def __iter__(self) -> Iterator[CharsetMatch]:
        yield from self._results

    def __getitem__(self, item: int | str) -> CharsetMatch:
        """
        Retrieve a single item either by its position or encoding name (alias may be used here).
        Raise KeyError upon invalid index or encoding not present in results.
        """
        if isinstance(item, int):
            return self._results[item]
        if isinstance(item, str):
            # Normalize the requested name, then match against each result's
            # main encoding and its equivalent submatch encodings.
            item = iana_name(item, False)
            for result in self._results:
                if item in result.could_be_from_charset:
                    return result
        raise KeyError

    def __len__(self) -> int:
        return len(self._results)

    def __bool__(self) -> bool:
        return len(self._results) > 0

    def append(self, item: CharsetMatch) -> None:
        """
        Insert a single match. Will be inserted accordingly to preserve sort.
        Can be inserted as a submatch.
        """
        if not isinstance(item, CharsetMatch):
            raise ValueError(
                "Cannot append instance '{}' to CharsetMatches".format(
                    str(item.__class__)
                )
            )
        # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
        if len(item.raw) < TOO_BIG_SEQUENCE:
            for match in self._results:
                # Identical output and identical chaos: fold into a submatch
                # instead of listing it separately.
                if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
                    match.add_submatch(item)
                    return
        self._results.append(item)
        self._results = sorted(self._results)

    def best(self) -> CharsetMatch | None:
        """
        Simply return the first match. Strict equivalent to matches[0].
        """
        if not self._results:
            return None
        return self._results[0]

    def first(self) -> CharsetMatch | None:
        """
        Redundant method, call the method best(). Kept for BC reasons.
        """
        return self.best()
+
+
# A coherence entry is a (language name, confidence ratio) pair; a list of
# them, best first, is attached to each CharsetMatch.
CoherenceMatch = Tuple[str, float]
CoherenceMatches = List[CoherenceMatch]
+
+
class CliDetectionResult:
    """JSON-serializable detection summary row emitted by the CLI."""

    def __init__(
        self,
        path: str,
        encoding: str | None,
        encoding_aliases: list[str],
        alternative_encodings: list[str],
        language: str,
        alphabets: list[str],
        has_sig_or_bom: bool,
        chaos: float,
        coherence: float,
        unicode_path: str | None,
        is_preferred: bool,
    ):
        # Input file location (plus an optional transliterated copy path).
        self.path: str = path
        self.unicode_path: str | None = unicode_path
        # Detection outcome.
        self.encoding: str | None = encoding
        self.encoding_aliases: list[str] = encoding_aliases
        self.alternative_encodings: list[str] = alternative_encodings
        self.language: str = language
        self.alphabets: list[str] = alphabets
        self.has_sig_or_bom: bool = has_sig_or_bom
        # Scores: chaos = mess ratio, coherence = language confidence.
        self.chaos: float = chaos
        self.coherence: float = coherence
        self.is_preferred: bool = is_preferred

    @property
    def __dict__(self) -> dict[str, Any]:  # type: ignore
        """Plain-dict view of this row; key order matches the CLI output."""
        return {
            "path": self.path,
            "encoding": self.encoding,
            "encoding_aliases": self.encoding_aliases,
            "alternative_encodings": self.alternative_encodings,
            "language": self.language,
            "alphabets": self.alphabets,
            "has_sig_or_bom": self.has_sig_or_bom,
            "chaos": self.chaos,
            "coherence": self.coherence,
            "unicode_path": self.unicode_path,
            "is_preferred": self.is_preferred,
        }

    def to_json(self) -> str:
        """Render this result as a pretty-printed, ASCII-safe JSON object."""
        return dumps(self.__dict__, ensure_ascii=True, indent=4)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/py.typed"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/py.typed"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/utils.py"
new file mode 100644
index 0000000..6bf0384
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/utils.py"
@@ -0,0 +1,414 @@
+from __future__ import annotations
+
+import importlib
+import logging
+import unicodedata
+from codecs import IncrementalDecoder
+from encodings.aliases import aliases
+from functools import lru_cache
+from re import findall
+from typing import Generator
+
+from _multibytecodec import ( # type: ignore[import-not-found,import]
+ MultibyteIncrementalDecoder,
+)
+
+from .constant import (
+ ENCODING_MARKS,
+ IANA_SUPPORTED_SIMILAR,
+ RE_POSSIBLE_ENCODING_INDICATION,
+ UNICODE_RANGES_COMBINED,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+ UTF8_MAXIMAL_ALLOCATION,
+ COMMON_CJK_CHARACTERS,
+)
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_accentuated(character: str) -> bool:
    """Return True when the character's Unicode name shows a diacritic mark."""
    try:
        description: str = unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
    return any(
        marker in description
        for marker in (
            "WITH GRAVE",
            "WITH ACUTE",
            "WITH CEDILLA",
            "WITH DIAERESIS",
            "WITH CIRCUMFLEX",
            "WITH TILDE",
            "WITH MACRON",
            "WITH RING ABOVE",
        )
    )
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def remove_accent(character: str) -> str:
    """Return the base character of a composed form, or the character itself."""
    decomposition: str = unicodedata.decomposition(character)

    if not decomposition:
        # Not a composed form: nothing to strip.
        return character

    # The first entry of the decomposition is the base code point in hex.
    base_code_point: str = decomposition.split(" ")[0]
    return chr(int(base_code_point, 16))
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def unicode_range(character: str) -> str | None:
    """
    Retrieve the Unicode range official name from a single character.
    """
    code_point: int = ord(character)

    # First matching range wins; None when the code point is unmapped.
    return next(
        (
            range_name
            for range_name, ord_range in UNICODE_RANGES_COMBINED.items()
            if code_point in ord_range
        ),
        None,
    )
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_latin(character: str) -> bool:
    """Return True when the character's Unicode name contains LATIN."""
    try:
        return "LATIN" in unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_punctuation(character: str) -> bool:
    """True for punctuation by category ('P*') or by Unicode range name."""
    if "P" in unicodedata.category(character):
        return True

    character_range: str | None = unicode_range(character)

    return character_range is not None and "Punctuation" in character_range
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_symbol(character: str) -> bool:
    """True for symbol ('S*') / number ('N*') categories, or characters in a
    '...Forms' range that are not plain letters (category 'Lo')."""
    category: str = unicodedata.category(character)

    if "S" in category or "N" in category:
        return True

    character_range: str | None = unicode_range(character)

    if character_range is None:
        return False

    return "Forms" in character_range and category != "Lo"
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_emoticon(character: str) -> bool:
    """True when the character falls in an emoticon or pictograph Unicode range."""
    character_range: str | None = unicode_range(character)

    return character_range is not None and (
        "Emoticons" in character_range or "Pictographs" in character_range
    )
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_separator(character: str) -> bool:
    """True for whitespace, a few ASCII delimiters, and separator categories."""
    if character.isspace() or character in {"|", "+", "<", ">"}:
        return True

    category: str = unicodedata.category(character)

    # 'Z*' are Unicode separators; Po/Pd/Pc punctuation also break words here.
    return "Z" in category or category in {"Po", "Pd", "Pc"}
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_case_variable(character: str) -> bool:
    """True when the character is cased (exactly one of upper/lower holds)."""
    return character.isupper() != character.islower()
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_cjk(character: str) -> bool:
    """Return True when the character's Unicode name mentions CJK."""
    try:
        return "CJK" in unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_hiragana(character: str) -> bool:
    """Return True when the character's Unicode name mentions HIRAGANA."""
    try:
        return "HIRAGANA" in unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_katakana(character: str) -> bool:
    """Return True when the character's Unicode name mentions KATAKANA."""
    try:
        return "KATAKANA" in unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_hangul(character: str) -> bool:
    """Return True when the character's Unicode name mentions HANGUL."""
    try:
        return "HANGUL" in unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_thai(character: str) -> bool:
    """Return True when the character's Unicode name mentions THAI."""
    try:
        return "THAI" in unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_arabic(character: str) -> bool:
    """Return True when the character's Unicode name mentions ARABIC."""
    try:
        return "ARABIC" in unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_arabic_isolated_form(character: str) -> bool:
    """Return True for Arabic presentation characters in ISOLATED FORM."""
    try:
        name: str = unicodedata.name(character)
    except ValueError:  # Defensive: unicode database outdated?
        return False
    return "ARABIC" in name and "ISOLATED FORM" in name
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_cjk_uncommon(character: str) -> bool:
    """True when the ideograph is absent from the common CJK character set."""
    return not (character in COMMON_CJK_CHARACTERS)
+
+
@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
def is_unicode_range_secondary(range_name: str) -> bool:
    """True when the range name carries a secondary keyword (extension, etc.)."""
    for keyword in UNICODE_SECONDARY_RANGE_KEYWORD:
        if keyword in range_name:
            return True
    return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_unprintable(character: str) -> bool:
    """True for characters that are neither printable nor whitespace.

    Two exceptions are tolerated: the ASCII substitute character (0x1A) and
    U+FEFF (BOM / zero-width no-break space), which Python does not classify
    as whitespace.
    """
    if character.isspace() or character.isprintable():
        return False
    return character not in ("\x1a", "\ufeff")
+
+
def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> str | None:
    """
    Extract using ASCII-only decoder any specified encoding in the first n-bytes.
    """
    if not isinstance(sequence, bytes):
        raise TypeError

    # Inspect only the leading window, dropping any non-ASCII bytes.
    window: str = sequence[: min(len(sequence), search_zone)].decode(
        "ascii", errors="ignore"
    )

    for candidate in findall(RE_POSSIBLE_ENCODING_INDICATION, window):
        candidate = candidate.lower().replace("-", "_")

        # Normalize to the Python codec name through the alias table.
        for encoding_alias, encoding_iana in aliases.items():
            if candidate in (encoding_alias, encoding_iana):
                return encoding_iana

    return None
+
+
@lru_cache(maxsize=128)
def is_multi_byte_encoding(name: str) -> bool:
    """
    Verify is a specific encoding is a multi byte one based on it IANA name
    """
    multi_byte_unicode: set[str] = {
        "utf_8",
        "utf_8_sig",
        "utf_16",
        "utf_16_be",
        "utf_16_le",
        "utf_32",
        "utf_32_le",
        "utf_32_be",
        "utf_7",
    }

    if name in multi_byte_unicode:
        return True

    # Everything else is multi-byte only when its incremental decoder derives
    # from the C-level multibyte codec machinery (the CJK codecs).
    decoder = importlib.import_module(f"encodings.{name}").IncrementalDecoder
    return issubclass(decoder, MultibyteIncrementalDecoder)
+
+
def identify_sig_or_bom(sequence: bytes) -> tuple[str | None, bytes]:
    """
    Identify and extract SIG/BOM in given sequence.
    """
    for iana_encoding, marks in ENCODING_MARKS.items():
        # A codec may declare a single mark or several alternatives.
        candidates: list[bytes] = [marks] if isinstance(marks, bytes) else marks

        for mark in candidates:
            if sequence.startswith(mark):
                return iana_encoding, mark

    # No known signature found.
    return None, b""
+
+
def should_strip_sig_or_bom(iana_encoding: str) -> bool:
    """BOMs stay meaningful for utf_16/utf_32 payloads; strip them otherwise."""
    keep_mark_for: tuple[str, ...] = ("utf_16", "utf_32")
    return iana_encoding not in keep_mark_for
+
+
def iana_name(cp_name: str, strict: bool = True) -> str:
    """Returns the Python normalized encoding name (Not the IANA official name)."""
    # Normalize the spelling before matching against the alias table.
    cp_name = cp_name.lower().replace("-", "_")

    for encoding_alias, encoding_iana in aliases.items():
        if cp_name in (encoding_alias, encoding_iana):
            return encoding_iana

    if strict:
        raise ValueError(f"Unable to retrieve IANA for '{cp_name}'")

    # Lenient mode: hand back the normalized input unchanged.
    return cp_name
+
+
def cp_similarity(iana_name_a: str, iana_name_b: str) -> float:
    """Share of single byte values that both code pages decode identically."""
    # Similarity is only meaningful between single-byte code pages.
    if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b):
        return 0.0

    decoder_a = importlib.import_module(f"encodings.{iana_name_a}").IncrementalDecoder
    decoder_b = importlib.import_module(f"encodings.{iana_name_b}").IncrementalDecoder

    id_a: IncrementalDecoder = decoder_a(errors="ignore")
    id_b: IncrementalDecoder = decoder_b(errors="ignore")

    # NOTE(review): 255 byte values are compared but the ratio divides by
    # 254 — preserved as-is for parity with the generated similarity table.
    character_match_count: int = sum(
        1 for i in range(255) if id_a.decode(bytes([i])) == id_b.decode(bytes([i]))
    )

    return character_match_count / 254
+
+
def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool:
    """
    Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using
    the function cp_similarity.
    """
    if iana_name_a not in IANA_SUPPORTED_SIMILAR:
        return False
    return iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a]
+
+
def set_logging_handler(
    name: str = "charset_normalizer",
    level: int = logging.INFO,
    format_string: str = "%(asctime)s | %(levelname)s | %(message)s",
) -> None:
    """Attach a stream handler with the given format to the named logger.

    NOTE(review): every call appends a new handler, so repeated calls will
    duplicate log lines — behavior preserved as-is.
    """
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(format_string))

    logger = logging.getLogger(name)
    logger.setLevel(level)
    logger.addHandler(stream_handler)
+
+
def cut_sequence_chunks(
    sequences: bytes,
    encoding_iana: str,
    offsets: range,
    chunk_size: int,
    bom_or_sig_available: bool,
    strip_sig_or_bom: bool,
    sig_payload: bytes,
    is_multi_byte_decoder: bool,
    decoded_payload: str | None = None,
) -> Generator[str, None, None]:
    """Yield decoded text chunks taken at the given offsets of *sequences*.

    For single-byte codecs with a pre-decoded payload, chunks are sliced
    directly from the string. Otherwise each byte window is decoded on the
    fly; for multi-byte codecs, a cut landing in the middle of a code unit
    is detected and the window start is nudged back (up to 3 bytes).
    """
    if decoded_payload and is_multi_byte_decoder is False:
        # Fast path: slice the already-decoded string.
        for i in offsets:
            chunk = decoded_payload[i : i + chunk_size]
            if not chunk:
                break
            yield chunk
    else:
        for i in offsets:
            chunk_end = i + chunk_size
            # Skip windows extending too far past the end of the payload.
            if chunk_end > len(sequences) + 8:
                continue

            cut_sequence = sequences[i : i + chunk_size]

            # Keep the signature so codecs that require it still decode.
            if bom_or_sig_available and strip_sig_or_bom is False:
                cut_sequence = sig_payload + cut_sequence

            chunk = cut_sequence.decode(
                encoding_iana,
                errors="ignore" if is_multi_byte_decoder else "strict",
            )

            # multi-byte bad cutting detector and adjustment
            # not the cleanest way to perform that fix but clever enough for now.
            if is_multi_byte_decoder and i > 0:
                chunk_partial_size_chk: int = min(chunk_size, 16)

                # If the chunk's prefix is not found in the reference decode,
                # the cut probably split a multi-byte sequence: back up the
                # window start one byte at a time until the prefix matches.
                if (
                    decoded_payload
                    and chunk[:chunk_partial_size_chk] not in decoded_payload
                ):
                    for j in range(i, i - 4, -1):
                        cut_sequence = sequences[j:chunk_end]

                        if bom_or_sig_available and strip_sig_or_bom is False:
                            cut_sequence = sig_payload + cut_sequence

                        chunk = cut_sequence.decode(encoding_iana, errors="ignore")

                        if chunk[:chunk_partial_size_chk] in decoded_payload:
                            break

            yield chunk
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/version.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/version.py"
new file mode 100644
index 0000000..c843e53
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/charset_normalizer/version.py"
@@ -0,0 +1,8 @@
+"""
+Expose version
+"""
+
+from __future__ import annotations
+
+__version__ = "3.4.4"
+VERSION = __version__.split(".")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/METADATA"
new file mode 100644
index 0000000..a1b5c57
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/METADATA"
@@ -0,0 +1,441 @@
+Metadata-Version: 2.1
+Name: colorama
+Version: 0.4.6
+Summary: Cross-platform colored terminal text.
+Project-URL: Homepage, https://github.com/tartley/colorama
+Author-email: Jonathan Hartley <tartley@tartley.com>
+License-File: LICENSE.txt
+Keywords: ansi,color,colour,crossplatform,terminal,text,windows,xplatform
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Terminals
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7
+Description-Content-Type: text/x-rst
+
+.. image:: https://img.shields.io/pypi/v/colorama.svg
+ :target: https://pypi.org/project/colorama/
+ :alt: Latest Version
+
+.. image:: https://img.shields.io/pypi/pyversions/colorama.svg
+ :target: https://pypi.org/project/colorama/
+ :alt: Supported Python versions
+
+.. image:: https://github.com/tartley/colorama/actions/workflows/test.yml/badge.svg
+ :target: https://github.com/tartley/colorama/actions/workflows/test.yml
+ :alt: Build Status
+
+Colorama
+========
+
+Makes ANSI escape character sequences (for producing colored terminal text and
+cursor positioning) work under MS Windows.
+
+.. |donate| image:: https://www.paypalobjects.com/en_US/i/btn/btn_donate_SM.gif
+   :target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=2MZ9D2GMLYCUJ&item_name=Colorama&currency_code=USD
+ :alt: Donate with Paypal
+
+`PyPI for releases <https://pypi.org/project/colorama/>`_ |
+`Github for source <https://github.com/tartley/colorama>`_ |
+`Colorama for enterprise on Tidelift <https://github.com/tartley/colorama/blob/master/ENTERPRISE.md>`_
+
+If you find Colorama useful, please |donate| to the authors. Thank you!
+
+Installation
+------------
+
+Tested on CPython 2.7, 3.7, 3.8, 3.9 and 3.10 and Pypy 2.7 and 3.8.
+
+No requirements other than the standard library.
+
+.. code-block:: bash
+
+ pip install colorama
+ # or
+ conda install -c anaconda colorama
+
+Description
+-----------
+
+ANSI escape character sequences have long been used to produce colored terminal
+text and cursor positioning on Unix and Macs. Colorama makes this work on
+Windows, too, by wrapping ``stdout``, stripping ANSI sequences it finds (which
+would appear as gobbledygook in the output), and converting them into the
+appropriate win32 calls to modify the state of the terminal. On other platforms,
+Colorama does nothing.
+
+This has the upshot of providing a simple cross-platform API for printing
+colored terminal text from Python, and has the happy side-effect that existing
+applications or libraries which use ANSI sequences to produce colored output on
+Linux or Macs can now also work on Windows, simply by calling
+``colorama.just_fix_windows_console()`` (since v0.4.6) or ``colorama.init()``
+(all versions, but may have other side-effects – see below).
+
+An alternative approach is to install ``ansi.sys`` on Windows machines, which
+provides the same behaviour for all applications running in terminals. Colorama
+is intended for situations where that isn't easy (e.g., maybe your app doesn't
+have an installer.)
+
+Demo scripts in the source code repository print some colored text using
+ANSI sequences. Compare their output under Gnome-terminal's built in ANSI
+handling, versus on Windows Command-Prompt using Colorama:
+
+.. image:: https://github.com/tartley/colorama/raw/master/screenshots/ubuntu-demo.png
+ :width: 661
+ :height: 357
+ :alt: ANSI sequences on Ubuntu under gnome-terminal.
+
+.. image:: https://github.com/tartley/colorama/raw/master/screenshots/windows-demo.png
+ :width: 668
+ :height: 325
+ :alt: Same ANSI sequences on Windows, using Colorama.
+
+These screenshots show that, on Windows, Colorama does not support ANSI 'dim
+text'; it looks the same as 'normal text'.
+
+Usage
+-----
+
+Initialisation
+..............
+
+If the only thing you want from Colorama is to get ANSI escapes to work on
+Windows, then run:
+
+.. code-block:: python
+
+ from colorama import just_fix_windows_console
+ just_fix_windows_console()
+
+If you're on a recent version of Windows 10 or better, and your stdout/stderr
+are pointing to a Windows console, then this will flip the magic configuration
+switch to enable Windows' built-in ANSI support.
+
+If you're on an older version of Windows, and your stdout/stderr are pointing to
+a Windows console, then this will wrap ``sys.stdout`` and/or ``sys.stderr`` in a
+magic file object that intercepts ANSI escape sequences and issues the
+appropriate Win32 calls to emulate them.
+
+In all other circumstances, it does nothing whatsoever. Basically the idea is
+that this makes Windows act like Unix with respect to ANSI escape handling.
+
+It's safe to call this function multiple times. It's safe to call this function
+on non-Windows platforms, but it won't do anything. It's safe to call this
+function when one or both of your stdout/stderr are redirected to a file – it
+won't do anything to those streams.
+
+Alternatively, you can use the older interface with more features (but also more
+potential footguns):
+
+.. code-block:: python
+
+ from colorama import init
+ init()
+
+This does the same thing as ``just_fix_windows_console``, except for the
+following differences:
+
+- It's not safe to call ``init`` multiple times; you can end up with multiple
+ layers of wrapping and broken ANSI support.
+
+- Colorama will apply a heuristic to guess whether stdout/stderr support ANSI,
+ and if it thinks they don't, then it will wrap ``sys.stdout`` and
+ ``sys.stderr`` in a magic file object that strips out ANSI escape sequences
+ before printing them. This happens on all platforms, and can be convenient if
+ you want to write your code to emit ANSI escape sequences unconditionally, and
+ let Colorama decide whether they should actually be output. But note that
+ Colorama's heuristic is not particularly clever.
+
+- ``init`` also accepts explicit keyword args to enable/disable various
+ functionality – see below.
+
+To stop using Colorama before your program exits, simply call ``deinit()``.
+This will restore ``stdout`` and ``stderr`` to their original values, so that
+Colorama is disabled. To resume using Colorama again, call ``reinit()``; it is
+cheaper than calling ``init()`` again (but does the same thing).
+
+Most users should depend on ``colorama >= 0.4.6``, and use
+``just_fix_windows_console``. The old ``init`` interface will be supported
+indefinitely for backwards compatibility, but we don't plan to fix any issues
+with it, also for backwards compatibility.
+
+Colored Output
+..............
+
+Cross-platform printing of colored text can then be done using Colorama's
+constant shorthand for ANSI escape sequences. These are deliberately
+rudimentary, see below.
+
+.. code-block:: python
+
+ from colorama import Fore, Back, Style
+ print(Fore.RED + 'some red text')
+ print(Back.GREEN + 'and with a green background')
+ print(Style.DIM + 'and in dim text')
+ print(Style.RESET_ALL)
+ print('back to normal now')
+
+...or simply by manually printing ANSI sequences from your own code:
+
+.. code-block:: python
+
+ print('\033[31m' + 'some red text')
+ print('\033[39m') # and reset to default color
+
+...or, Colorama can be used in conjunction with existing ANSI libraries
+such as the venerable `Termcolor <https://pypi.org/project/termcolor/>`_,
+the fabulous `Blessings <https://pypi.org/project/blessings/>`_,
+or the incredible `Rich <https://pypi.org/project/rich/>`_.
+
+If you wish Colorama's Fore, Back and Style constants were more capable,
+then consider using one of the above highly capable libraries to generate
+colors, etc, and use Colorama just for its primary purpose: to convert
+those ANSI sequences to also work on Windows:
+
+SIMILARLY, do not send PRs adding the generation of new ANSI types to Colorama.
+We are only interested in converting ANSI codes to win32 API calls, not
+shortcuts like the above to generate ANSI characters.
+
+.. code-block:: python
+
+ from colorama import just_fix_windows_console
+ from termcolor import colored
+
+ # use Colorama to make Termcolor work on Windows too
+ just_fix_windows_console()
+
+ # then use Termcolor for all colored text output
+ print(colored('Hello, World!', 'green', 'on_red'))
+
+Available formatting constants are::
+
+ Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
+ Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
+ Style: DIM, NORMAL, BRIGHT, RESET_ALL
+
+``Style.RESET_ALL`` resets foreground, background, and brightness. Colorama will
+perform this reset automatically on program exit.
+
+These are fairly well supported, but not part of the standard::
+
+ Fore: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
+ Back: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
+
+Cursor Positioning
+..................
+
+ANSI codes to reposition the cursor are supported. See ``demos/demo06.py`` for
+an example of how to generate them.
+
+Init Keyword Args
+.................
+
+``init()`` accepts some ``**kwargs`` to override default behaviour.
+
+init(autoreset=False):
+ If you find yourself repeatedly sending reset sequences to turn off color
+ changes at the end of every print, then ``init(autoreset=True)`` will
+ automate that:
+
+ .. code-block:: python
+
+ from colorama import init
+ init(autoreset=True)
+ print(Fore.RED + 'some red text')
+ print('automatically back to default color again')
+
+init(strip=None):
+ Pass ``True`` or ``False`` to override whether ANSI codes should be
+ stripped from the output. The default behaviour is to strip if on Windows
+ or if output is redirected (not a tty).
+
+init(convert=None):
+ Pass ``True`` or ``False`` to override whether to convert ANSI codes in the
+ output into win32 calls. The default behaviour is to convert if on Windows
+ and output is to a tty (terminal).
+
+init(wrap=True):
+ On Windows, Colorama works by replacing ``sys.stdout`` and ``sys.stderr``
+ with proxy objects, which override the ``.write()`` method to do their work.
+ If this wrapping causes you problems, then this can be disabled by passing
+ ``init(wrap=False)``. The default behaviour is to wrap if ``autoreset`` or
+ ``strip`` or ``convert`` are True.
+
+ When wrapping is disabled, colored printing on non-Windows platforms will
+ continue to work as normal. To do cross-platform colored output, you can
+ use Colorama's ``AnsiToWin32`` proxy directly:
+
+ .. code-block:: python
+
+ import sys
+ from colorama import init, AnsiToWin32
+ init(wrap=False)
+ stream = AnsiToWin32(sys.stderr).stream
+
+ # Python 2
+ print >>stream, Fore.BLUE + 'blue text on stderr'
+
+ # Python 3
+ print(Fore.BLUE + 'blue text on stderr', file=stream)
+
+Recognised ANSI Sequences
+.........................
+
+ANSI sequences generally take the form::
+
+ ESC [ <param> ; <param> ... <command>
+
+Where ``<param>`` is an integer, and ``<command>`` is a single letter. Zero or
+more params are passed to a ``<command>``. If no params are passed, it is
+generally synonymous with passing a single zero. No spaces exist in the
+sequence; they have been inserted here simply to read more easily.
+
+The only ANSI sequences that Colorama converts into win32 calls are::
+
+ ESC [ 0 m # reset all (colors and brightness)
+ ESC [ 1 m # bright
+ ESC [ 2 m # dim (looks same as normal brightness)
+ ESC [ 22 m # normal brightness
+
+ # FOREGROUND:
+ ESC [ 30 m # black
+ ESC [ 31 m # red
+ ESC [ 32 m # green
+ ESC [ 33 m # yellow
+ ESC [ 34 m # blue
+ ESC [ 35 m # magenta
+ ESC [ 36 m # cyan
+ ESC [ 37 m # white
+ ESC [ 39 m # reset
+
+ # BACKGROUND
+ ESC [ 40 m # black
+ ESC [ 41 m # red
+ ESC [ 42 m # green
+ ESC [ 43 m # yellow
+ ESC [ 44 m # blue
+ ESC [ 45 m # magenta
+ ESC [ 46 m # cyan
+ ESC [ 47 m # white
+ ESC [ 49 m # reset
+
+ # cursor positioning
+ ESC [ y;x H # position cursor at x across, y down
+ ESC [ y;x f # position cursor at x across, y down
+ ESC [ n A # move cursor n lines up
+ ESC [ n B # move cursor n lines down
+ ESC [ n C # move cursor n characters forward
+ ESC [ n D # move cursor n characters backward
+
+ # clear the screen
+ ESC [ mode J # clear the screen
+
+ # clear the line
+ ESC [ mode K # clear the line
+
+Multiple numeric params to the ``'m'`` command can be combined into a single
+sequence::
+
+ ESC [ 36 ; 45 ; 1 m # bright cyan text on magenta background
+
+All other ANSI sequences of the form ``ESC [ <param> ; <param> ... <command>``
+are silently stripped from the output on Windows.
+
+Any other form of ANSI sequence, such as single-character codes or alternative
+initial characters, are not recognised or stripped. It would be cool to add
+them though. Let me know if it would be useful for you, via the Issues on
+GitHub.
+
+Status & Known Problems
+-----------------------
+
+I've personally only tested it on Windows XP (CMD, Console2), Ubuntu
+(gnome-terminal, xterm), and OS X.
+
+Some valid ANSI sequences aren't recognised.
+
+If you're hacking on the code, see `README-hacking.md`_. ESPECIALLY, see the
+explanation there of why we do not want PRs that allow Colorama to generate new
+types of ANSI codes.
+
+See outstanding issues and wish-list:
+https://github.com/tartley/colorama/issues
+
+If anything doesn't work for you, or doesn't do what you expected or hoped for,
+I'd love to hear about it on that issues list, would be delighted by patches,
+and would be happy to grant commit access to anyone who submits a working patch
+or two.
+
+.. _README-hacking.md: README-hacking.md
+
+License
+-------
+
+Copyright Jonathan Hartley & Arnon Yaari, 2013-2020. BSD 3-Clause license; see
+LICENSE file.
+
+Professional support
+--------------------
+
+.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png
+ :alt: Tidelift
+ :target: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
+
+.. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for colorama is available as part of the
+ `Tidelift Subscription`_.
+ Tidelift gives software development teams a single source for purchasing
+ and maintaining their software, with professional grade assurances from
+ the experts who know it best, while seamlessly integrating with existing
+ tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
+
+Thanks
+------
+
+See the CHANGELOG for more thanks!
+
+* Marc Schlaich (schlamar) for a ``setup.py`` fix for Python2.5.
+* Marc Abramowitz, reported & fixed a crash on exit with closed ``stdout``,
+ providing a solution to issue #7's setuptools/distutils debate,
+ and other fixes.
+* User 'eryksun', for guidance on correctly instantiating ``ctypes.windll``.
+* Matthew McCormick for politely pointing out a longstanding crash on non-Win.
+* Ben Hoyt, for a magnificent fix under 64-bit Windows.
+* Jesse at Empty Square for submitting a fix for examples in the README.
+* User 'jamessp', an observant documentation fix for cursor positioning.
+* User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7
+ fix.
+* Julien Stuyck, for wisely suggesting Python3 compatible updates to README.
+* Daniel Griffith for multiple fabulous patches.
+* Oscar Lesta for a valuable fix to stop ANSI chars being sent to non-tty
+ output.
+* Roger Binns, for many suggestions, valuable feedback, & bug reports.
+* Tim Golden for thought and much appreciated feedback on the initial idea.
+* User 'Zearin' for updates to the README file.
+* John Szakmeister for adding support for light colors
+* Charles Merriam for adding documentation to demos
+* Jurko for a fix on 64-bit Windows CPython2.5 w/o ctypes
+* Florian Bruhin for a fix when stdout or stderr are None
+* Thomas Weininger for fixing ValueError on Windows
+* Remi Rampin for better Github integration and fixes to the README file
+* Simeon Visser for closing a file handle using 'with' and updating classifiers
+ to include Python 3.3 and 3.4
+* Andy Neff for fixing RESET of LIGHT_EX colors.
+* Jonathan Hartley for the initial idea and implementation.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/RECORD"
new file mode 100644
index 0000000..cd6b130
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/RECORD"
@@ -0,0 +1,31 @@
+colorama-0.4.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+colorama-0.4.6.dist-info/METADATA,sha256=e67SnrUMOym9sz_4TjF3vxvAV4T3aF7NyqRHHH3YEMw,17158
+colorama-0.4.6.dist-info/RECORD,,
+colorama-0.4.6.dist-info/WHEEL,sha256=cdcF4Fbd0FPtw2EMIOwH-3rSOTUdTCeOSXRMD1iLUb8,105
+colorama-0.4.6.dist-info/licenses/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491
+colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266
+colorama/__pycache__/__init__.cpython-312.pyc,,
+colorama/__pycache__/ansi.cpython-312.pyc,,
+colorama/__pycache__/ansitowin32.cpython-312.pyc,,
+colorama/__pycache__/initialise.cpython-312.pyc,,
+colorama/__pycache__/win32.cpython-312.pyc,,
+colorama/__pycache__/winterm.cpython-312.pyc,,
+colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522
+colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128
+colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325
+colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75
+colorama/tests/__pycache__/__init__.cpython-312.pyc,,
+colorama/tests/__pycache__/ansi_test.cpython-312.pyc,,
+colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc,,
+colorama/tests/__pycache__/initialise_test.cpython-312.pyc,,
+colorama/tests/__pycache__/isatty_test.cpython-312.pyc,,
+colorama/tests/__pycache__/utils.cpython-312.pyc,,
+colorama/tests/__pycache__/winterm_test.cpython-312.pyc,,
+colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839
+colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678
+colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741
+colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866
+colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079
+colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709
+colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181
+colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL"
new file mode 100644
index 0000000..d79189f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.11.1
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt"
new file mode 100644
index 0000000..3105888
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt"
@@ -0,0 +1,27 @@
+Copyright (c) 2010 Jonathan Hartley
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holders, nor those of its contributors
+ may be used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/__init__.py"
new file mode 100644
index 0000000..383101c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/__init__.py"
@@ -0,0 +1,7 @@
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
# Public API re-exports: callers use e.g. ``colorama.init()`` and
# ``colorama.Fore.RED`` directly from the package namespace.
from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console
from .ansi import Fore, Back, Style, Cursor
from .ansitowin32 import AnsiToWin32

__version__ = '0.4.6'
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/ansi.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/ansi.py"
new file mode 100644
index 0000000..11ec695
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/ansi.py"
@@ -0,0 +1,102 @@
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI escape codes for printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''

CSI = '\033['  # Control Sequence Introducer (ESC [): prefix of most sequences
OSC = '\033]'  # Operating System Command (ESC ]): used e.g. for window titles
BEL = '\a'     # Bell character: terminates OSC sequences
+
+
def code_to_chars(code):
    """Return the full ANSI SGR escape sequence for the numeric *code*."""
    return '{}{}m'.format(CSI, code)
+
def set_title(title):
    """Return the OSC sequence that sets the terminal window title."""
    return ''.join((OSC, '2;', title, BEL))
+
def clear_screen(mode=2):
    """Return the CSI 'J' (erase display) sequence; mode 2 clears everything."""
    return '{}{}J'.format(CSI, mode)
+
def clear_line(mode=2):
    """Return the CSI 'K' (erase line) sequence; mode 2 clears the whole line."""
    return '{}{}K'.format(CSI, mode)
+
+
class AnsiCodes(object):
    """Base class that turns numeric class attributes into escape strings.

    Subclasses declare class attributes holding ANSI numeric codes; on
    instantiation each public attribute is shadowed by an instance attribute
    containing the corresponding full escape sequence.
    """

    def __init__(self):
        for attr_name in dir(self):
            if attr_name.startswith('_'):
                continue
            numeric_code = getattr(self, attr_name)
            setattr(self, attr_name, code_to_chars(numeric_code))
+
+
class AnsiCursor(object):
    """Generates ANSI cursor-movement escape sequences."""

    def UP(self, n=1):
        """Move the cursor up *n* lines."""
        return '{}{}A'.format(CSI, n)

    def DOWN(self, n=1):
        """Move the cursor down *n* lines."""
        return '{}{}B'.format(CSI, n)

    def FORWARD(self, n=1):
        """Move the cursor forward *n* columns."""
        return '{}{}C'.format(CSI, n)

    def BACK(self, n=1):
        """Move the cursor back *n* columns."""
        return '{}{}D'.format(CSI, n)

    def POS(self, x=1, y=1):
        """Position the cursor at column *x*, row *y* (both 1-based)."""
        return '{}{};{}H'.format(CSI, y, x)
+
+
class AnsiFore(AnsiCodes):
    """Foreground color codes; AnsiCodes.__init__ wraps each into a full
    escape sequence on instantiation."""
    BLACK           = 30
    RED             = 31
    GREEN           = 32
    YELLOW          = 33
    BLUE            = 34
    MAGENTA         = 35
    CYAN            = 36
    WHITE           = 37
    RESET           = 39

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 90
    LIGHTRED_EX     = 91
    LIGHTGREEN_EX   = 92
    LIGHTYELLOW_EX  = 93
    LIGHTBLUE_EX    = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX    = 96
    LIGHTWHITE_EX   = 97
+
+
class AnsiBack(AnsiCodes):
    """Background color codes; AnsiCodes.__init__ wraps each into a full
    escape sequence on instantiation."""
    BLACK           = 40
    RED             = 41
    GREEN           = 42
    YELLOW          = 43
    BLUE            = 44
    MAGENTA         = 45
    CYAN            = 46
    WHITE           = 47
    RESET           = 49

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 100
    LIGHTRED_EX     = 101
    LIGHTGREEN_EX   = 102
    LIGHTYELLOW_EX  = 103
    LIGHTBLUE_EX    = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX    = 106
    LIGHTWHITE_EX   = 107
+
+
class AnsiStyle(AnsiCodes):
    """Text style (brightness) codes; RESET_ALL also resets colors."""
    BRIGHT    = 1
    DIM       = 2
    NORMAL    = 22
    RESET_ALL = 0
+
# Pre-built singletons: importing code uses these instances directly, so
# e.g. ``Fore.RED`` is the full escape string rather than the numeric code.
Fore   = AnsiFore()
Back   = AnsiBack()
Style  = AnsiStyle()
Cursor = AnsiCursor()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/ansitowin32.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/ansitowin32.py"
new file mode 100644
index 0000000..abf209e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/ansitowin32.py"
@@ -0,0 +1,277 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import re
+import sys
+import os
+
+from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL
+from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle
+from .win32 import windll, winapi_test
+
+
+winterm = None
+if windll is not None:
+ winterm = WinTerm()
+
+
+class StreamWrapper(object):
+ '''
+ Wraps a stream (such as stdout), acting as a transparent proxy for all
+ attribute access apart from method 'write()', which is delegated to our
+ Converter instance.
+ '''
+ def __init__(self, wrapped, converter):
+ # double-underscore everything to prevent clashes with names of
+ # attributes on the wrapped stream object.
+ self.__wrapped = wrapped
+ self.__convertor = converter
+
+ def __getattr__(self, name):
+ return getattr(self.__wrapped, name)
+
+ def __enter__(self, *args, **kwargs):
+ # special method lookup bypasses __getattr__/__getattribute__, see
+ # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
+ # thus, contextlib magic methods are not proxied via __getattr__
+ return self.__wrapped.__enter__(*args, **kwargs)
+
+ def __exit__(self, *args, **kwargs):
+ return self.__wrapped.__exit__(*args, **kwargs)
+
+ def __setstate__(self, state):
+ self.__dict__ = state
+
+ def __getstate__(self):
+ return self.__dict__
+
+ def write(self, text):
+ self.__convertor.write(text)
+
+ def isatty(self):
+ stream = self.__wrapped
+ if 'PYCHARM_HOSTED' in os.environ:
+ if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
+ return True
+ try:
+ stream_isatty = stream.isatty
+ except AttributeError:
+ return False
+ else:
+ return stream_isatty()
+
+ @property
+ def closed(self):
+ stream = self.__wrapped
+ try:
+ return stream.closed
+ # AttributeError in the case that the stream doesn't support being closed
+ # ValueError for the case that the stream has already been detached when atexit runs
+ except (AttributeError, ValueError):
+ return True
+
+
+class AnsiToWin32(object):
+ '''
+ Implements a 'write()' method which, on Windows, will strip ANSI character
+ sequences from the text, and if outputting to a tty, will convert them into
+ win32 function calls.
+ '''
+ ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
+ ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command
+
+ def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
+ # The wrapped stream (normally sys.stdout or sys.stderr)
+ self.wrapped = wrapped
+
+ # should we reset colors to defaults after every .write()
+ self.autoreset = autoreset
+
+ # create the proxy wrapping our output stream
+ self.stream = StreamWrapper(wrapped, self)
+
+ on_windows = os.name == 'nt'
+ # We test if the WinAPI works, because even if we are on Windows
+ # we may be using a terminal that doesn't support the WinAPI
+ # (e.g. Cygwin Terminal). In this case it's up to the terminal
+ # to support the ANSI codes.
+ conversion_supported = on_windows and winapi_test()
+ try:
+ fd = wrapped.fileno()
+ except Exception:
+ fd = -1
+ system_has_native_ansi = not on_windows or enable_vt_processing(fd)
+ have_tty = not self.stream.closed and self.stream.isatty()
+ need_conversion = conversion_supported and not system_has_native_ansi
+
+ # should we strip ANSI sequences from our output?
+ if strip is None:
+ strip = need_conversion or not have_tty
+ self.strip = strip
+
+ # should we should convert ANSI sequences into win32 calls?
+ if convert is None:
+ convert = need_conversion and have_tty
+ self.convert = convert
+
+ # dict of ansi codes to win32 functions and parameters
+ self.win32_calls = self.get_win32_calls()
+
+ # are we wrapping stderr?
+ self.on_stderr = self.wrapped is sys.stderr
+
+ def should_wrap(self):
+ '''
+ True if this class is actually needed. If false, then the output
+ stream will not be affected, nor will win32 calls be issued, so
+ wrapping stdout is not actually required. This will generally be
+ False on non-Windows platforms, unless optional functionality like
+ autoreset has been requested using kwargs to init()
+ '''
+ return self.convert or self.strip or self.autoreset
+
+ def get_win32_calls(self):
+ if self.convert and winterm:
+ return {
+ AnsiStyle.RESET_ALL: (winterm.reset_all, ),
+ AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
+ AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
+ AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
+ AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
+ AnsiFore.RED: (winterm.fore, WinColor.RED),
+ AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
+ AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
+ AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
+ AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
+ AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
+ AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
+ AnsiFore.RESET: (winterm.fore, ),
+ AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
+ AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
+ AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
+ AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
+ AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
+ AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
+ AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
+ AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
+ AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
+ AnsiBack.RED: (winterm.back, WinColor.RED),
+ AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
+ AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
+ AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
+ AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
+ AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
+ AnsiBack.WHITE: (winterm.back, WinColor.GREY),
+ AnsiBack.RESET: (winterm.back, ),
+ AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
+ AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
+ AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
+ AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
+ AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
+ AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
+ AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
+ AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
+ }
+ return dict()
+
+ def write(self, text):
+ if self.strip or self.convert:
+ self.write_and_convert(text)
+ else:
+ self.wrapped.write(text)
+ self.wrapped.flush()
+ if self.autoreset:
+ self.reset_all()
+
+
+ def reset_all(self):
+ if self.convert:
+ self.call_win32('m', (0,))
+ elif not self.strip and not self.stream.closed:
+ self.wrapped.write(Style.RESET_ALL)
+
+
+ def write_and_convert(self, text):
+ '''
+ Write the given text to our wrapped stream, stripping any ANSI
+ sequences from the text, and optionally converting them into win32
+ calls.
+ '''
+ cursor = 0
+ text = self.convert_osc(text)
+ for match in self.ANSI_CSI_RE.finditer(text):
+ start, end = match.span()
+ self.write_plain_text(text, cursor, start)
+ self.convert_ansi(*match.groups())
+ cursor = end
+ self.write_plain_text(text, cursor, len(text))
+
+
+ def write_plain_text(self, text, start, end):
+ if start < end:
+ self.wrapped.write(text[start:end])
+ self.wrapped.flush()
+
+
+ def convert_ansi(self, paramstring, command):
+ if self.convert:
+ params = self.extract_params(command, paramstring)
+ self.call_win32(command, params)
+
+
+ def extract_params(self, command, paramstring):
+ if command in 'Hf':
+ params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
+ while len(params) < 2:
+ # defaults:
+ params = params + (1,)
+ else:
+ params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
+ if len(params) == 0:
+ # defaults:
+ if command in 'JKm':
+ params = (0,)
+ elif command in 'ABCD':
+ params = (1,)
+
+ return params
+
+
+ def call_win32(self, command, params):
+ if command == 'm':
+ for param in params:
+ if param in self.win32_calls:
+ func_args = self.win32_calls[param]
+ func = func_args[0]
+ args = func_args[1:]
+ kwargs = dict(on_stderr=self.on_stderr)
+ func(*args, **kwargs)
+ elif command in 'J':
+ winterm.erase_screen(params[0], on_stderr=self.on_stderr)
+ elif command in 'K':
+ winterm.erase_line(params[0], on_stderr=self.on_stderr)
+ elif command in 'Hf': # cursor position - absolute
+ winterm.set_cursor_position(params, on_stderr=self.on_stderr)
+ elif command in 'ABCD': # cursor position - relative
+ n = params[0]
+ # A - up, B - down, C - forward, D - back
+ x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
+ winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
+
+
+ def convert_osc(self, text):
+ for match in self.ANSI_OSC_RE.finditer(text):
+ start, end = match.span()
+ text = text[:start] + text[end:]
+ paramstring, command = match.groups()
+ if command == BEL:
+ if paramstring.count(";") == 1:
+ params = paramstring.split(";")
+ # 0 - change title and icon (we will only change title)
+ # 1 - change icon (we don't support this)
+ # 2 - change title
+ if params[0] in '02':
+ winterm.set_title(params[1])
+ return text
+
+
+ def flush(self):
+ self.wrapped.flush()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/initialise.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/initialise.py"
new file mode 100644
index 0000000..d5fd4b7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/initialise.py"
@@ -0,0 +1,121 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import atexit
+import contextlib
+import sys
+
+from .ansitowin32 import AnsiToWin32
+
+
+def _wipe_internal_state_for_tests():
+ global orig_stdout, orig_stderr
+ orig_stdout = None
+ orig_stderr = None
+
+ global wrapped_stdout, wrapped_stderr
+ wrapped_stdout = None
+ wrapped_stderr = None
+
+ global atexit_done
+ atexit_done = False
+
+ global fixed_windows_console
+ fixed_windows_console = False
+
+ try:
+ # no-op if it wasn't registered
+ atexit.unregister(reset_all)
+ except AttributeError:
+ # python 2: no atexit.unregister. Oh well, we did our best.
+ pass
+
+
+def reset_all():
+ if AnsiToWin32 is not None: # Issue #74: objects might become None at exit
+ AnsiToWin32(orig_stdout).reset_all()
+
+
+def init(autoreset=False, convert=None, strip=None, wrap=True):
+
+ if not wrap and any([autoreset, convert, strip]):
+ raise ValueError('wrap=False conflicts with any other arg=True')
+
+ global wrapped_stdout, wrapped_stderr
+ global orig_stdout, orig_stderr
+
+ orig_stdout = sys.stdout
+ orig_stderr = sys.stderr
+
+ if sys.stdout is None:
+ wrapped_stdout = None
+ else:
+ sys.stdout = wrapped_stdout = \
+ wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
+ if sys.stderr is None:
+ wrapped_stderr = None
+ else:
+ sys.stderr = wrapped_stderr = \
+ wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
+
+ global atexit_done
+ if not atexit_done:
+ atexit.register(reset_all)
+ atexit_done = True
+
+
+def deinit():
+ if orig_stdout is not None:
+ sys.stdout = orig_stdout
+ if orig_stderr is not None:
+ sys.stderr = orig_stderr
+
+
+def just_fix_windows_console():
+ global fixed_windows_console
+
+ if sys.platform != "win32":
+ return
+ if fixed_windows_console:
+ return
+ if wrapped_stdout is not None or wrapped_stderr is not None:
+ # Someone already ran init() and it did stuff, so we won't second-guess them
+ return
+
+ # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the
+ # native ANSI support in the console as a side-effect. We only need to actually
+ # replace sys.stdout/stderr if we're in the old-style conversion mode.
+ new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False)
+ if new_stdout.convert:
+ sys.stdout = new_stdout
+ new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False)
+ if new_stderr.convert:
+ sys.stderr = new_stderr
+
+ fixed_windows_console = True
+
+@contextlib.contextmanager
+def colorama_text(*args, **kwargs):
+ init(*args, **kwargs)
+ try:
+ yield
+ finally:
+ deinit()
+
+
+def reinit():
+ if wrapped_stdout is not None:
+ sys.stdout = wrapped_stdout
+ if wrapped_stderr is not None:
+ sys.stderr = wrapped_stderr
+
+
+def wrap_stream(stream, convert, strip, autoreset, wrap):
+ if wrap:
+ wrapper = AnsiToWin32(stream,
+ convert=convert, strip=strip, autoreset=autoreset)
+ if wrapper.should_wrap():
+ stream = wrapper.stream
+ return stream
+
+
+# Use this for initial setup as well, to reduce code duplication
+_wipe_internal_state_for_tests()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/__init__.py"
new file mode 100644
index 0000000..8c5661e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/__init__.py"
@@ -0,0 +1 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/ansi_test.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/ansi_test.py"
new file mode 100644
index 0000000..0a20c80
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/ansi_test.py"
@@ -0,0 +1,76 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main
+
+from ..ansi import Back, Fore, Style
+from ..ansitowin32 import AnsiToWin32
+
+stdout_orig = sys.stdout
+stderr_orig = sys.stderr
+
+
+class AnsiTest(TestCase):
+
+ def setUp(self):
+ # sanity check: stdout should be a file or StringIO object.
+ # It will only be AnsiToWin32 if init() has previously wrapped it
+ self.assertNotEqual(type(sys.stdout), AnsiToWin32)
+ self.assertNotEqual(type(sys.stderr), AnsiToWin32)
+
+ def tearDown(self):
+ sys.stdout = stdout_orig
+ sys.stderr = stderr_orig
+
+
+ def testForeAttributes(self):
+ self.assertEqual(Fore.BLACK, '\033[30m')
+ self.assertEqual(Fore.RED, '\033[31m')
+ self.assertEqual(Fore.GREEN, '\033[32m')
+ self.assertEqual(Fore.YELLOW, '\033[33m')
+ self.assertEqual(Fore.BLUE, '\033[34m')
+ self.assertEqual(Fore.MAGENTA, '\033[35m')
+ self.assertEqual(Fore.CYAN, '\033[36m')
+ self.assertEqual(Fore.WHITE, '\033[37m')
+ self.assertEqual(Fore.RESET, '\033[39m')
+
+ # Check the light, extended versions.
+ self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m')
+ self.assertEqual(Fore.LIGHTRED_EX, '\033[91m')
+ self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m')
+ self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m')
+ self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m')
+ self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m')
+ self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m')
+ self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m')
+
+
+ def testBackAttributes(self):
+ self.assertEqual(Back.BLACK, '\033[40m')
+ self.assertEqual(Back.RED, '\033[41m')
+ self.assertEqual(Back.GREEN, '\033[42m')
+ self.assertEqual(Back.YELLOW, '\033[43m')
+ self.assertEqual(Back.BLUE, '\033[44m')
+ self.assertEqual(Back.MAGENTA, '\033[45m')
+ self.assertEqual(Back.CYAN, '\033[46m')
+ self.assertEqual(Back.WHITE, '\033[47m')
+ self.assertEqual(Back.RESET, '\033[49m')
+
+ # Check the light, extended versions.
+ self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m')
+ self.assertEqual(Back.LIGHTRED_EX, '\033[101m')
+ self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m')
+ self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m')
+ self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m')
+ self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m')
+ self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m')
+ self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m')
+
+
+ def testStyleAttributes(self):
+ self.assertEqual(Style.DIM, '\033[2m')
+ self.assertEqual(Style.NORMAL, '\033[22m')
+ self.assertEqual(Style.BRIGHT, '\033[1m')
+
+
+if __name__ == '__main__':
+ main()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/ansitowin32_test.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/ansitowin32_test.py"
new file mode 100644
index 0000000..91ca551
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/ansitowin32_test.py"
@@ -0,0 +1,294 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from io import StringIO, TextIOWrapper
+from unittest import TestCase, main
+try:
+ from contextlib import ExitStack
+except ImportError:
+ # python 2
+ from contextlib2 import ExitStack
+
+try:
+ from unittest.mock import MagicMock, Mock, patch
+except ImportError:
+ from mock import MagicMock, Mock, patch
+
+from ..ansitowin32 import AnsiToWin32, StreamWrapper
+from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING
+from .utils import osname
+
+
+class StreamWrapperTest(TestCase):
+
+ def testIsAProxy(self):
+ mockStream = Mock()
+ wrapper = StreamWrapper(mockStream, None)
+ self.assertTrue( wrapper.random_attr is mockStream.random_attr )
+
+ def testDelegatesWrite(self):
+ mockStream = Mock()
+ mockConverter = Mock()
+ wrapper = StreamWrapper(mockStream, mockConverter)
+ wrapper.write('hello')
+ self.assertTrue(mockConverter.write.call_args, (('hello',), {}))
+
+ def testDelegatesContext(self):
+ mockConverter = Mock()
+ s = StringIO()
+ with StreamWrapper(s, mockConverter) as fp:
+ fp.write(u'hello')
+ self.assertTrue(s.closed)
+
+ def testProxyNoContextManager(self):
+ mockStream = MagicMock()
+ mockStream.__enter__.side_effect = AttributeError()
+ mockConverter = Mock()
+ with self.assertRaises(AttributeError) as excinfo:
+ with StreamWrapper(mockStream, mockConverter) as wrapper:
+ wrapper.write('hello')
+
+ def test_closed_shouldnt_raise_on_closed_stream(self):
+ stream = StringIO()
+ stream.close()
+ wrapper = StreamWrapper(stream, None)
+ self.assertEqual(wrapper.closed, True)
+
+ def test_closed_shouldnt_raise_on_detached_stream(self):
+ stream = TextIOWrapper(StringIO())
+ stream.detach()
+ wrapper = StreamWrapper(stream, None)
+ self.assertEqual(wrapper.closed, True)
+
+class AnsiToWin32Test(TestCase):
+
+ def testInit(self):
+ mockStdout = Mock()
+ auto = Mock()
+ stream = AnsiToWin32(mockStdout, autoreset=auto)
+ self.assertEqual(stream.wrapped, mockStdout)
+ self.assertEqual(stream.autoreset, auto)
+
+ @patch('colorama.ansitowin32.winterm', None)
+ @patch('colorama.ansitowin32.winapi_test', lambda *_: True)
+ def testStripIsTrueOnWindows(self):
+ with osname('nt'):
+ mockStdout = Mock()
+ stream = AnsiToWin32(mockStdout)
+ self.assertTrue(stream.strip)
+
+ def testStripIsFalseOffWindows(self):
+ with osname('posix'):
+ mockStdout = Mock(closed=False)
+ stream = AnsiToWin32(mockStdout)
+ self.assertFalse(stream.strip)
+
+ def testWriteStripsAnsi(self):
+ mockStdout = Mock()
+ stream = AnsiToWin32(mockStdout)
+ stream.wrapped = Mock()
+ stream.write_and_convert = Mock()
+ stream.strip = True
+
+ stream.write('abc')
+
+ self.assertFalse(stream.wrapped.write.called)
+ self.assertEqual(stream.write_and_convert.call_args, (('abc',), {}))
+
+ def testWriteDoesNotStripAnsi(self):
+ mockStdout = Mock()
+ stream = AnsiToWin32(mockStdout)
+ stream.wrapped = Mock()
+ stream.write_and_convert = Mock()
+ stream.strip = False
+ stream.convert = False
+
+ stream.write('abc')
+
+ self.assertFalse(stream.write_and_convert.called)
+ self.assertEqual(stream.wrapped.write.call_args, (('abc',), {}))
+
+ def assert_autoresets(self, convert, autoreset=True):
+ stream = AnsiToWin32(Mock())
+ stream.convert = convert
+ stream.reset_all = Mock()
+ stream.autoreset = autoreset
+ stream.winterm = Mock()
+
+ stream.write('abc')
+
+ self.assertEqual(stream.reset_all.called, autoreset)
+
+ def testWriteAutoresets(self):
+ self.assert_autoresets(convert=True)
+ self.assert_autoresets(convert=False)
+ self.assert_autoresets(convert=True, autoreset=False)
+ self.assert_autoresets(convert=False, autoreset=False)
+
+ def testWriteAndConvertWritesPlainText(self):
+ stream = AnsiToWin32(Mock())
+ stream.write_and_convert( 'abc' )
+ self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) )
+
+ def testWriteAndConvertStripsAllValidAnsi(self):
+ stream = AnsiToWin32(Mock())
+ stream.call_win32 = Mock()
+ data = [
+ 'abc\033[mdef',
+ 'abc\033[0mdef',
+ 'abc\033[2mdef',
+ 'abc\033[02mdef',
+ 'abc\033[002mdef',
+ 'abc\033[40mdef',
+ 'abc\033[040mdef',
+ 'abc\033[0;1mdef',
+ 'abc\033[40;50mdef',
+ 'abc\033[50;30;40mdef',
+ 'abc\033[Adef',
+ 'abc\033[0Gdef',
+ 'abc\033[1;20;128Hdef',
+ ]
+ for datum in data:
+ stream.wrapped.write.reset_mock()
+ stream.write_and_convert( datum )
+ self.assertEqual(
+ [args[0] for args in stream.wrapped.write.call_args_list],
+ [ ('abc',), ('def',) ]
+ )
+
+ def testWriteAndConvertSkipsEmptySnippets(self):
+ stream = AnsiToWin32(Mock())
+ stream.call_win32 = Mock()
+ stream.write_and_convert( '\033[40m\033[41m' )
+ self.assertFalse( stream.wrapped.write.called )
+
+ def testWriteAndConvertCallsWin32WithParamsAndCommand(self):
+ stream = AnsiToWin32(Mock())
+ stream.convert = True
+ stream.call_win32 = Mock()
+ stream.extract_params = Mock(return_value='params')
+ data = {
+ 'abc\033[adef': ('a', 'params'),
+ 'abc\033[;;bdef': ('b', 'params'),
+ 'abc\033[0cdef': ('c', 'params'),
+ 'abc\033[;;0;;Gdef': ('G', 'params'),
+ 'abc\033[1;20;128Hdef': ('H', 'params'),
+ }
+ for datum, expected in data.items():
+ stream.call_win32.reset_mock()
+ stream.write_and_convert( datum )
+ self.assertEqual( stream.call_win32.call_args[0], expected )
+
+ def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self):
+ stream = StringIO()
+ converter = AnsiToWin32(stream)
+ stream.close()
+
+ converter.reset_all()
+
+ def test_wrap_shouldnt_raise_on_closed_orig_stdout(self):
+ stream = StringIO()
+ stream.close()
+ with \
+ patch("colorama.ansitowin32.os.name", "nt"), \
+ patch("colorama.ansitowin32.winapi_test", lambda: True):
+ converter = AnsiToWin32(stream)
+ self.assertTrue(converter.strip)
+ self.assertFalse(converter.convert)
+
+ def test_wrap_shouldnt_raise_on_missing_closed_attr(self):
+ with \
+ patch("colorama.ansitowin32.os.name", "nt"), \
+ patch("colorama.ansitowin32.winapi_test", lambda: True):
+ converter = AnsiToWin32(object())
+ self.assertTrue(converter.strip)
+ self.assertFalse(converter.convert)
+
+ def testExtractParams(self):
+ stream = AnsiToWin32(Mock())
+ data = {
+ '': (0,),
+ ';;': (0,),
+ '2': (2,),
+ ';;002;;': (2,),
+ '0;1': (0, 1),
+ ';;003;;456;;': (3, 456),
+ '11;22;33;44;55': (11, 22, 33, 44, 55),
+ }
+ for datum, expected in data.items():
+ self.assertEqual(stream.extract_params('m', datum), expected)
+
+ def testCallWin32UsesLookup(self):
+ listener = Mock()
+ stream = AnsiToWin32(listener)
+ stream.win32_calls = {
+ 1: (lambda *_, **__: listener(11),),
+ 2: (lambda *_, **__: listener(22),),
+ 3: (lambda *_, **__: listener(33),),
+ }
+ stream.call_win32('m', (3, 1, 99, 2))
+ self.assertEqual(
+ [a[0][0] for a in listener.call_args_list],
+ [33, 11, 22] )
+
+ def test_osc_codes(self):
+ mockStdout = Mock()
+ stream = AnsiToWin32(mockStdout, convert=True)
+ with patch('colorama.ansitowin32.winterm') as winterm:
+ data = [
+ '\033]0\x07', # missing arguments
+ '\033]0;foo\x08', # wrong OSC command
+ '\033]0;colorama_test_title\x07', # should work
+ '\033]1;colorama_test_title\x07', # wrong set command
+ '\033]2;colorama_test_title\x07', # should work
+ '\033]' + ';' * 64 + '\x08', # see issue #247
+ ]
+ for code in data:
+ stream.write(code)
+ self.assertEqual(winterm.set_title.call_count, 2)
+
+ def test_native_windows_ansi(self):
+ with ExitStack() as stack:
+ def p(a, b):
+ stack.enter_context(patch(a, b, create=True))
+ # Pretend to be on Windows
+ p("colorama.ansitowin32.os.name", "nt")
+ p("colorama.ansitowin32.winapi_test", lambda: True)
+ p("colorama.win32.winapi_test", lambda: True)
+ p("colorama.winterm.win32.windll", "non-None")
+ p("colorama.winterm.get_osfhandle", lambda _: 1234)
+
+ # Pretend that our mock stream has native ANSI support
+ p(
+ "colorama.winterm.win32.GetConsoleMode",
+ lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING,
+ )
+ SetConsoleMode = Mock()
+ p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode)
+
+ stdout = Mock()
+ stdout.closed = False
+ stdout.isatty.return_value = True
+ stdout.fileno.return_value = 1
+
+ # Our fake console says it has native vt support, so AnsiToWin32 should
+ # enable that support and do nothing else.
+ stream = AnsiToWin32(stdout)
+ SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+ self.assertFalse(stream.strip)
+ self.assertFalse(stream.convert)
+ self.assertFalse(stream.should_wrap())
+
+ # Now let's pretend we're on an old Windows console, that doesn't have
+ # native ANSI support.
+ p("colorama.winterm.win32.GetConsoleMode", lambda _: 0)
+ SetConsoleMode = Mock()
+ p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode)
+
+ stream = AnsiToWin32(stdout)
+ SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING)
+ self.assertTrue(stream.strip)
+ self.assertTrue(stream.convert)
+ self.assertTrue(stream.should_wrap())
+
+
+if __name__ == '__main__':
+ main()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/initialise_test.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/initialise_test.py"
new file mode 100644
index 0000000..89f9b07
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/initialise_test.py"
@@ -0,0 +1,189 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main, skipUnless
+
+try:
+ from unittest.mock import patch, Mock
+except ImportError:
+ from mock import patch, Mock
+
+from ..ansitowin32 import StreamWrapper
+from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests
+from .utils import osname, replace_by
+
+orig_stdout = sys.stdout
+orig_stderr = sys.stderr
+
+
+class InitTest(TestCase):
+
+ @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty")
+ def setUp(self):
+ # sanity check
+ self.assertNotWrapped()
+
+ def tearDown(self):
+ _wipe_internal_state_for_tests()
+ sys.stdout = orig_stdout
+ sys.stderr = orig_stderr
+
+ def assertWrapped(self):
+ self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped')
+ self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped')
+ self.assertTrue(isinstance(sys.stdout, StreamWrapper),
+ 'bad stdout wrapper')
+ self.assertTrue(isinstance(sys.stderr, StreamWrapper),
+ 'bad stderr wrapper')
+
+ def assertNotWrapped(self):
+ self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped')
+ self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped')
+
+ @patch('colorama.initialise.reset_all')
+ @patch('colorama.ansitowin32.winapi_test', lambda *_: True)
+ @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False)
+ def testInitWrapsOnWindows(self, _):
+ with osname("nt"):
+ init()
+ self.assertWrapped()
+
+ @patch('colorama.initialise.reset_all')
+ @patch('colorama.ansitowin32.winapi_test', lambda *_: False)
+ def testInitDoesntWrapOnEmulatedWindows(self, _):
+ with osname("nt"):
+ init()
+ self.assertNotWrapped()
+
+ def testInitDoesntWrapOnNonWindows(self):
+ with osname("posix"):
+ init()
+ self.assertNotWrapped()
+
+ def testInitDoesntWrapIfNone(self):
+ with replace_by(None):
+ init()
+ # We can't use assertNotWrapped here because replace_by(None)
+ # changes stdout/stderr already.
+ self.assertIsNone(sys.stdout)
+ self.assertIsNone(sys.stderr)
+
+ def testInitAutoresetOnWrapsOnAllPlatforms(self):
+ with osname("posix"):
+ init(autoreset=True)
+ self.assertWrapped()
+
+ def testInitWrapOffDoesntWrapOnWindows(self):
+ with osname("nt"):
+ init(wrap=False)
+ self.assertNotWrapped()
+
+ def testInitWrapOffIncompatibleWithAutoresetOn(self):
+ self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False))
+
+ @patch('colorama.win32.SetConsoleTextAttribute')
+ @patch('colorama.initialise.AnsiToWin32')
+ def testAutoResetPassedOn(self, mockATW32, _):
+ with osname("nt"):
+ init(autoreset=True)
+ self.assertEqual(len(mockATW32.call_args_list), 2)
+ self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True)
+ self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True)
+
+ @patch('colorama.initialise.AnsiToWin32')
+ def testAutoResetChangeable(self, mockATW32):
+ with osname("nt"):
+ init()
+
+ init(autoreset=True)
+ self.assertEqual(len(mockATW32.call_args_list), 4)
+ self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True)
+ self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True)
+
+ init()
+ self.assertEqual(len(mockATW32.call_args_list), 6)
+ self.assertEqual(
+ mockATW32.call_args_list[4][1]['autoreset'], False)
+ self.assertEqual(
+ mockATW32.call_args_list[5][1]['autoreset'], False)
+
+
+ @patch('colorama.initialise.atexit.register')
+ def testAtexitRegisteredOnlyOnce(self, mockRegister):
+ init()
+ self.assertTrue(mockRegister.called)
+ mockRegister.reset_mock()
+ init()
+ self.assertFalse(mockRegister.called)
+
+
+class JustFixWindowsConsoleTest(TestCase):
+ def _reset(self):
+ _wipe_internal_state_for_tests()
+ sys.stdout = orig_stdout
+ sys.stderr = orig_stderr
+
+ def tearDown(self):
+ self._reset()
+
+ @patch("colorama.ansitowin32.winapi_test", lambda: True)
+ def testJustFixWindowsConsole(self):
+ if sys.platform != "win32":
+ # just_fix_windows_console should be a no-op
+ just_fix_windows_console()
+ self.assertIs(sys.stdout, orig_stdout)
+ self.assertIs(sys.stderr, orig_stderr)
+ else:
+ def fake_std():
+ # Emulate stdout=not a tty, stderr=tty
+ # to check that we handle both cases correctly
+ stdout = Mock()
+ stdout.closed = False
+ stdout.isatty.return_value = False
+ stdout.fileno.return_value = 1
+ sys.stdout = stdout
+
+ stderr = Mock()
+ stderr.closed = False
+ stderr.isatty.return_value = True
+ stderr.fileno.return_value = 2
+ sys.stderr = stderr
+
+ for native_ansi in [False, True]:
+ with patch(
+ 'colorama.ansitowin32.enable_vt_processing',
+ lambda *_: native_ansi
+ ):
+ self._reset()
+ fake_std()
+
+ # Regular single-call test
+ prev_stdout = sys.stdout
+ prev_stderr = sys.stderr
+ just_fix_windows_console()
+ self.assertIs(sys.stdout, prev_stdout)
+ if native_ansi:
+ self.assertIs(sys.stderr, prev_stderr)
+ else:
+ self.assertIsNot(sys.stderr, prev_stderr)
+
+ # second call without resetting is always a no-op
+ prev_stdout = sys.stdout
+ prev_stderr = sys.stderr
+ just_fix_windows_console()
+ self.assertIs(sys.stdout, prev_stdout)
+ self.assertIs(sys.stderr, prev_stderr)
+
+ self._reset()
+ fake_std()
+
+ # If init() runs first, just_fix_windows_console should be a no-op
+ init()
+ prev_stdout = sys.stdout
+ prev_stderr = sys.stderr
+ just_fix_windows_console()
+ self.assertIs(prev_stdout, sys.stdout)
+ self.assertIs(prev_stderr, sys.stderr)
+
+
+if __name__ == '__main__':
+ main()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/isatty_test.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/isatty_test.py"
new file mode 100644
index 0000000..0f84e4b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/isatty_test.py"
@@ -0,0 +1,57 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main
+
+from ..ansitowin32 import StreamWrapper, AnsiToWin32
+from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY
+
+
+def is_a_tty(stream):
+ return StreamWrapper(stream, None).isatty()
+
+class IsattyTest(TestCase):
+
+ def test_TTY(self):
+ tty = StreamTTY()
+ self.assertTrue(is_a_tty(tty))
+ with pycharm():
+ self.assertTrue(is_a_tty(tty))
+
+ def test_nonTTY(self):
+ non_tty = StreamNonTTY()
+ self.assertFalse(is_a_tty(non_tty))
+ with pycharm():
+ self.assertFalse(is_a_tty(non_tty))
+
+ def test_withPycharm(self):
+ with pycharm():
+ self.assertTrue(is_a_tty(sys.stderr))
+ self.assertTrue(is_a_tty(sys.stdout))
+
+ def test_withPycharmTTYOverride(self):
+ tty = StreamTTY()
+ with pycharm(), replace_by(tty):
+ self.assertTrue(is_a_tty(tty))
+
+ def test_withPycharmNonTTYOverride(self):
+ non_tty = StreamNonTTY()
+ with pycharm(), replace_by(non_tty):
+ self.assertFalse(is_a_tty(non_tty))
+
+ def test_withPycharmNoneOverride(self):
+ with pycharm():
+ with replace_by(None), replace_original_by(None):
+ self.assertFalse(is_a_tty(None))
+ self.assertFalse(is_a_tty(StreamNonTTY()))
+ self.assertTrue(is_a_tty(StreamTTY()))
+
+ def test_withPycharmStreamWrapped(self):
+ with pycharm():
+ self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty())
+ self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty())
+ self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty())
+ self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty())
+
+
+if __name__ == '__main__':
+ main()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/utils.py"
new file mode 100644
index 0000000..472fafb
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/utils.py"
@@ -0,0 +1,49 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from contextlib import contextmanager
+from io import StringIO
+import sys
+import os
+
+
+class StreamTTY(StringIO):
+ def isatty(self):
+ return True
+
+class StreamNonTTY(StringIO):
+ def isatty(self):
+ return False
+
+@contextmanager
+def osname(name):
+ orig = os.name
+ os.name = name
+ yield
+ os.name = orig
+
+@contextmanager
+def replace_by(stream):
+ orig_stdout = sys.stdout
+ orig_stderr = sys.stderr
+ sys.stdout = stream
+ sys.stderr = stream
+ yield
+ sys.stdout = orig_stdout
+ sys.stderr = orig_stderr
+
+@contextmanager
+def replace_original_by(stream):
+ orig_stdout = sys.__stdout__
+ orig_stderr = sys.__stderr__
+ sys.__stdout__ = stream
+ sys.__stderr__ = stream
+ yield
+ sys.__stdout__ = orig_stdout
+ sys.__stderr__ = orig_stderr
+
+@contextmanager
+def pycharm():
+ os.environ["PYCHARM_HOSTED"] = "1"
+ non_tty = StreamNonTTY()
+ with replace_by(non_tty), replace_original_by(non_tty):
+ yield
+ del os.environ["PYCHARM_HOSTED"]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/winterm_test.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/winterm_test.py"
new file mode 100644
index 0000000..d0955f9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/tests/winterm_test.py"
@@ -0,0 +1,131 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import sys
+from unittest import TestCase, main, skipUnless
+
+try:
+ from unittest.mock import Mock, patch
+except ImportError:
+ from mock import Mock, patch
+
+from ..winterm import WinColor, WinStyle, WinTerm
+
+
+class WinTermTest(TestCase):
+
+ @patch('colorama.winterm.win32')
+ def testInit(self, mockWin32):
+ mockAttr = Mock()
+ mockAttr.wAttributes = 7 + 6 * 16 + 8
+ mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+ term = WinTerm()
+ self.assertEqual(term._fore, 7)
+ self.assertEqual(term._back, 6)
+ self.assertEqual(term._style, 8)
+
+ @skipUnless(sys.platform.startswith("win"), "requires Windows")
+ def testGetAttrs(self):
+ term = WinTerm()
+
+ term._fore = 0
+ term._back = 0
+ term._style = 0
+ self.assertEqual(term.get_attrs(), 0)
+
+ term._fore = WinColor.YELLOW
+ self.assertEqual(term.get_attrs(), WinColor.YELLOW)
+
+ term._back = WinColor.MAGENTA
+ self.assertEqual(
+ term.get_attrs(),
+ WinColor.YELLOW + WinColor.MAGENTA * 16)
+
+ term._style = WinStyle.BRIGHT
+ self.assertEqual(
+ term.get_attrs(),
+ WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT)
+
+ @patch('colorama.winterm.win32')
+ def testResetAll(self, mockWin32):
+ mockAttr = Mock()
+ mockAttr.wAttributes = 1 + 2 * 16 + 8
+ mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+ term = WinTerm()
+
+ term.set_console = Mock()
+ term._fore = -1
+ term._back = -1
+ term._style = -1
+
+ term.reset_all()
+
+ self.assertEqual(term._fore, 1)
+ self.assertEqual(term._back, 2)
+ self.assertEqual(term._style, 8)
+ self.assertEqual(term.set_console.called, True)
+
+ @skipUnless(sys.platform.startswith("win"), "requires Windows")
+ def testFore(self):
+ term = WinTerm()
+ term.set_console = Mock()
+ term._fore = 0
+
+ term.fore(5)
+
+ self.assertEqual(term._fore, 5)
+ self.assertEqual(term.set_console.called, True)
+
+ @skipUnless(sys.platform.startswith("win"), "requires Windows")
+ def testBack(self):
+ term = WinTerm()
+ term.set_console = Mock()
+ term._back = 0
+
+ term.back(5)
+
+ self.assertEqual(term._back, 5)
+ self.assertEqual(term.set_console.called, True)
+
+ @skipUnless(sys.platform.startswith("win"), "requires Windows")
+ def testStyle(self):
+ term = WinTerm()
+ term.set_console = Mock()
+ term._style = 0
+
+ term.style(22)
+
+ self.assertEqual(term._style, 22)
+ self.assertEqual(term.set_console.called, True)
+
+ @patch('colorama.winterm.win32')
+ def testSetConsole(self, mockWin32):
+ mockAttr = Mock()
+ mockAttr.wAttributes = 0
+ mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+ term = WinTerm()
+ term.windll = Mock()
+
+ term.set_console()
+
+ self.assertEqual(
+ mockWin32.SetConsoleTextAttribute.call_args,
+ ((mockWin32.STDOUT, term.get_attrs()), {})
+ )
+
+ @patch('colorama.winterm.win32')
+ def testSetConsoleOnStderr(self, mockWin32):
+ mockAttr = Mock()
+ mockAttr.wAttributes = 0
+ mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr
+ term = WinTerm()
+ term.windll = Mock()
+
+ term.set_console(on_stderr=True)
+
+ self.assertEqual(
+ mockWin32.SetConsoleTextAttribute.call_args,
+ ((mockWin32.STDERR, term.get_attrs()), {})
+ )
+
+
+if __name__ == '__main__':
+ main()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/win32.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/win32.py"
new file mode 100644
index 0000000..841b0e2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/win32.py"
@@ -0,0 +1,180 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+
+# from winbase.h
+STDOUT = -11
+STDERR = -12
+
+ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
+
+try:
+ import ctypes
+ from ctypes import LibraryLoader
+ windll = LibraryLoader(ctypes.WinDLL)
+ from ctypes import wintypes
+except (AttributeError, ImportError):
+ windll = None
+ SetConsoleTextAttribute = lambda *_: None
+ winapi_test = lambda *_: None
+else:
+ from ctypes import byref, Structure, c_char, POINTER
+
+ COORD = wintypes._COORD
+
+ class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+ """struct in wincon.h."""
+ _fields_ = [
+ ("dwSize", COORD),
+ ("dwCursorPosition", COORD),
+ ("wAttributes", wintypes.WORD),
+ ("srWindow", wintypes.SMALL_RECT),
+ ("dwMaximumWindowSize", COORD),
+ ]
+ def __str__(self):
+ return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
+ self.dwSize.Y, self.dwSize.X
+ , self.dwCursorPosition.Y, self.dwCursorPosition.X
+ , self.wAttributes
+ , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
+ , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
+ )
+
+ _GetStdHandle = windll.kernel32.GetStdHandle
+ _GetStdHandle.argtypes = [
+ wintypes.DWORD,
+ ]
+ _GetStdHandle.restype = wintypes.HANDLE
+
+ _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
+ _GetConsoleScreenBufferInfo.argtypes = [
+ wintypes.HANDLE,
+ POINTER(CONSOLE_SCREEN_BUFFER_INFO),
+ ]
+ _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+
+ _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
+ _SetConsoleTextAttribute.argtypes = [
+ wintypes.HANDLE,
+ wintypes.WORD,
+ ]
+ _SetConsoleTextAttribute.restype = wintypes.BOOL
+
+ _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
+ _SetConsoleCursorPosition.argtypes = [
+ wintypes.HANDLE,
+ COORD,
+ ]
+ _SetConsoleCursorPosition.restype = wintypes.BOOL
+
+ _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
+ _FillConsoleOutputCharacterA.argtypes = [
+ wintypes.HANDLE,
+ c_char,
+ wintypes.DWORD,
+ COORD,
+ POINTER(wintypes.DWORD),
+ ]
+ _FillConsoleOutputCharacterA.restype = wintypes.BOOL
+
+ _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
+ _FillConsoleOutputAttribute.argtypes = [
+ wintypes.HANDLE,
+ wintypes.WORD,
+ wintypes.DWORD,
+ COORD,
+ POINTER(wintypes.DWORD),
+ ]
+ _FillConsoleOutputAttribute.restype = wintypes.BOOL
+
+ _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW
+ _SetConsoleTitleW.argtypes = [
+ wintypes.LPCWSTR
+ ]
+ _SetConsoleTitleW.restype = wintypes.BOOL
+
+ _GetConsoleMode = windll.kernel32.GetConsoleMode
+ _GetConsoleMode.argtypes = [
+ wintypes.HANDLE,
+ POINTER(wintypes.DWORD)
+ ]
+ _GetConsoleMode.restype = wintypes.BOOL
+
+ _SetConsoleMode = windll.kernel32.SetConsoleMode
+ _SetConsoleMode.argtypes = [
+ wintypes.HANDLE,
+ wintypes.DWORD
+ ]
+ _SetConsoleMode.restype = wintypes.BOOL
+
+ def _winapi_test(handle):
+ csbi = CONSOLE_SCREEN_BUFFER_INFO()
+ success = _GetConsoleScreenBufferInfo(
+ handle, byref(csbi))
+ return bool(success)
+
+ def winapi_test():
+ return any(_winapi_test(h) for h in
+ (_GetStdHandle(STDOUT), _GetStdHandle(STDERR)))
+
+ def GetConsoleScreenBufferInfo(stream_id=STDOUT):
+ handle = _GetStdHandle(stream_id)
+ csbi = CONSOLE_SCREEN_BUFFER_INFO()
+ success = _GetConsoleScreenBufferInfo(
+ handle, byref(csbi))
+ return csbi
+
+ def SetConsoleTextAttribute(stream_id, attrs):
+ handle = _GetStdHandle(stream_id)
+ return _SetConsoleTextAttribute(handle, attrs)
+
+ def SetConsoleCursorPosition(stream_id, position, adjust=True):
+ position = COORD(*position)
+ # If the position is out of range, do nothing.
+ if position.Y <= 0 or position.X <= 0:
+ return
+ # Adjust for Windows' SetConsoleCursorPosition:
+ # 1. being 0-based, while ANSI is 1-based.
+ # 2. expecting (x,y), while ANSI uses (y,x).
+ adjusted_position = COORD(position.Y - 1, position.X - 1)
+ if adjust:
+ # Adjust for viewport's scroll position
+ sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
+ adjusted_position.Y += sr.Top
+ adjusted_position.X += sr.Left
+ # Resume normal processing
+ handle = _GetStdHandle(stream_id)
+ return _SetConsoleCursorPosition(handle, adjusted_position)
+
+ def FillConsoleOutputCharacter(stream_id, char, length, start):
+ handle = _GetStdHandle(stream_id)
+ char = c_char(char.encode())
+ length = wintypes.DWORD(length)
+ num_written = wintypes.DWORD(0)
+ # Note that this is hard-coded for ANSI (vs wide) bytes.
+ success = _FillConsoleOutputCharacterA(
+ handle, char, length, start, byref(num_written))
+ return num_written.value
+
+ def FillConsoleOutputAttribute(stream_id, attr, length, start):
+ ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
+ handle = _GetStdHandle(stream_id)
+ attribute = wintypes.WORD(attr)
+ length = wintypes.DWORD(length)
+ num_written = wintypes.DWORD(0)
+ # Note that this is hard-coded for ANSI (vs wide) bytes.
+ return _FillConsoleOutputAttribute(
+ handle, attribute, length, start, byref(num_written))
+
+ def SetConsoleTitle(title):
+ return _SetConsoleTitleW(title)
+
+ def GetConsoleMode(handle):
+ mode = wintypes.DWORD()
+ success = _GetConsoleMode(handle, byref(mode))
+ if not success:
+ raise ctypes.WinError()
+ return mode.value
+
+ def SetConsoleMode(handle, mode):
+ success = _SetConsoleMode(handle, mode)
+ if not success:
+ raise ctypes.WinError()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/winterm.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/winterm.py"
new file mode 100644
index 0000000..aad867e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/colorama/winterm.py"
@@ -0,0 +1,195 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+try:
+ from msvcrt import get_osfhandle
+except ImportError:
+ def get_osfhandle(_):
+ raise OSError("This isn't windows!")
+
+
+from . import win32
+
+# from wincon.h
+class WinColor(object):
+ BLACK = 0
+ BLUE = 1
+ GREEN = 2
+ CYAN = 3
+ RED = 4
+ MAGENTA = 5
+ YELLOW = 6
+ GREY = 7
+
+# from wincon.h
+class WinStyle(object):
+ NORMAL = 0x00 # dim text, dim background
+ BRIGHT = 0x08 # bright text, dim background
+ BRIGHT_BACKGROUND = 0x80 # dim text, bright background
+
+class WinTerm(object):
+
+ def __init__(self):
+ self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
+ self.set_attrs(self._default)
+ self._default_fore = self._fore
+ self._default_back = self._back
+ self._default_style = self._style
+ # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
+ # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
+ # we track them separately, since LIGHT_EX is overwritten by Fore/Back
+ # and BRIGHT is overwritten by Style codes.
+ self._light = 0
+
+ def get_attrs(self):
+ return self._fore + self._back * 16 + (self._style | self._light)
+
+ def set_attrs(self, value):
+ self._fore = value & 7
+ self._back = (value >> 4) & 7
+ self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
+
+ def reset_all(self, on_stderr=None):
+ self.set_attrs(self._default)
+ self.set_console(attrs=self._default)
+ self._light = 0
+
+ def fore(self, fore=None, light=False, on_stderr=False):
+ if fore is None:
+ fore = self._default_fore
+ self._fore = fore
+ # Emulate LIGHT_EX with BRIGHT Style
+ if light:
+ self._light |= WinStyle.BRIGHT
+ else:
+ self._light &= ~WinStyle.BRIGHT
+ self.set_console(on_stderr=on_stderr)
+
+ def back(self, back=None, light=False, on_stderr=False):
+ if back is None:
+ back = self._default_back
+ self._back = back
+ # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
+ if light:
+ self._light |= WinStyle.BRIGHT_BACKGROUND
+ else:
+ self._light &= ~WinStyle.BRIGHT_BACKGROUND
+ self.set_console(on_stderr=on_stderr)
+
+ def style(self, style=None, on_stderr=False):
+ if style is None:
+ style = self._default_style
+ self._style = style
+ self.set_console(on_stderr=on_stderr)
+
+ def set_console(self, attrs=None, on_stderr=False):
+ if attrs is None:
+ attrs = self.get_attrs()
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ win32.SetConsoleTextAttribute(handle, attrs)
+
+ def get_position(self, handle):
+ position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
+ # Because Windows coordinates are 0-based,
+ # and win32.SetConsoleCursorPosition expects 1-based.
+ position.X += 1
+ position.Y += 1
+ return position
+
+ def set_cursor_position(self, position=None, on_stderr=False):
+ if position is None:
+ # I'm not currently tracking the position, so there is no default.
+ # position = self.get_position()
+ return
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ win32.SetConsoleCursorPosition(handle, position)
+
+ def cursor_adjust(self, x, y, on_stderr=False):
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ position = self.get_position(handle)
+ adjusted_position = (position.Y + y, position.X + x)
+ win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
+
+ def erase_screen(self, mode=0, on_stderr=False):
+ # 0 should clear from the cursor to the end of the screen.
+ # 1 should clear from the cursor to the beginning of the screen.
+ # 2 should clear the entire screen, and move cursor to (1,1)
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ csbi = win32.GetConsoleScreenBufferInfo(handle)
+ # get the number of character cells in the current buffer
+ cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
+ # get number of character cells before current cursor position
+ cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
+ if mode == 0:
+ from_coord = csbi.dwCursorPosition
+ cells_to_erase = cells_in_screen - cells_before_cursor
+ elif mode == 1:
+ from_coord = win32.COORD(0, 0)
+ cells_to_erase = cells_before_cursor
+ elif mode == 2:
+ from_coord = win32.COORD(0, 0)
+ cells_to_erase = cells_in_screen
+ else:
+ # invalid mode
+ return
+ # fill the entire screen with blanks
+ win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+ # now set the buffer's attributes accordingly
+ win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+ if mode == 2:
+ # put the cursor where needed
+ win32.SetConsoleCursorPosition(handle, (1, 1))
+
+ def erase_line(self, mode=0, on_stderr=False):
+ # 0 should clear from the cursor to the end of the line.
+ # 1 should clear from the cursor to the beginning of the line.
+ # 2 should clear the entire line.
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ csbi = win32.GetConsoleScreenBufferInfo(handle)
+ if mode == 0:
+ from_coord = csbi.dwCursorPosition
+ cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
+ elif mode == 1:
+ from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+ cells_to_erase = csbi.dwCursorPosition.X
+ elif mode == 2:
+ from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+ cells_to_erase = csbi.dwSize.X
+ else:
+ # invalid mode
+ return
+ # fill the entire screen with blanks
+ win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+ # now set the buffer's attributes accordingly
+ win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+
+ def set_title(self, title):
+ win32.SetConsoleTitle(title)
+
+
+def enable_vt_processing(fd):
+ if win32.windll is None or not win32.winapi_test():
+ return False
+
+ try:
+ handle = get_osfhandle(fd)
+ mode = win32.GetConsoleMode(handle)
+ win32.SetConsoleMode(
+ handle,
+ mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING,
+ )
+
+ mode = win32.GetConsoleMode(handle)
+ if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING:
+ return True
+ # Can get TypeError in testsuite where 'fd' is a Mock()
+ except (OSError, TypeError):
+ return False
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/METADATA"
new file mode 100644
index 0000000..30d6653
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/METADATA"
@@ -0,0 +1,672 @@
+Metadata-Version: 2.4
+Name: frozenlist
+Version: 1.8.0
+Summary: A list-like structure which implements collections.abc.MutableSequence
+Home-page: https://github.com/aio-libs/frozenlist
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache-2.0
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: Github Actions, https://github.com/aio-libs/frozenlist/actions
+Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/frozenlist
+Project-URL: Docs: Changelog, https://github.com/aio-libs/frozenlist/blob/master/CHANGES.rst#changelog
+Project-URL: Docs: RTD, https://frozenlist.aio-libs.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/frozenlist/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/frozenlist
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Cython
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Dynamic: license-file
+
+frozenlist
+==========
+
+.. image:: https://github.com/aio-libs/frozenlist/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/frozenlist/actions
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/frozenlist/branch/master/graph/badge.svg?flag=pytest
+ :target: https://codecov.io/gh/aio-libs/frozenlist?flags[]=pytest
+ :alt: codecov.io status for master branch
+
+.. image:: https://img.shields.io/pypi/v/frozenlist.svg?logo=Python&logoColor=white
+ :target: https://pypi.org/project/frozenlist
+ :alt: frozenlist @ PyPI
+
+.. image:: https://readthedocs.org/projects/frozenlist/badge/?version=latest
+ :target: https://frozenlist.aio-libs.org
+ :alt: Read The Docs build status badge
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Introduction
+------------
+
+``frozenlist.FrozenList`` is a list-like structure which implements
+``collections.abc.MutableSequence``. The list is *mutable* until ``FrozenList.freeze``
+is called, after which list modifications raise ``RuntimeError``:
+
+
+>>> from frozenlist import FrozenList
+>>> fl = FrozenList([17, 42])
+>>> fl.append('spam')
+>>> fl.append('Vikings')
+>>> fl
+<FrozenList(frozen=False, [17, 42, 'spam', 'Vikings'])>
+>>> fl.freeze()
+>>> fl
+<FrozenList(frozen=True, [17, 42, 'spam', 'Vikings'])>
+>>> fl.frozen
+True
+>>> fl.append("Monty")
+Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "frozenlist/_frozenlist.pyx", line 97, in frozenlist._frozenlist.FrozenList.append
+ self._check_frozen()
+ File "frozenlist/_frozenlist.pyx", line 19, in frozenlist._frozenlist.FrozenList._check_frozen
+ raise RuntimeError("Cannot modify frozen list.")
+RuntimeError: Cannot modify frozen list.
+
+
+FrozenList is also hashable, but only when frozen. Otherwise it also throws a RuntimeError:
+
+
+>>> fl = FrozenList([17, 42, 'spam'])
+>>> hash(fl)
+Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "frozenlist/_frozenlist.pyx", line 111, in frozenlist._frozenlist.FrozenList.__hash__
+ raise RuntimeError("Cannot hash unfrozen list.")
+RuntimeError: Cannot hash unfrozen list.
+>>> fl.freeze()
+>>> hash(fl)
+3713081631934410656
+>>> dictionary = {fl: 'Vikings'} # frozen fl can be a dict key
+>>> dictionary
+{<FrozenList(frozen=True, [1, 2])>: 'Vikings'}
+
+
+Installation
+------------
+
+::
+
+ $ pip install frozenlist
+
+
+Documentation
+-------------
+
+https://frozenlist.aio-libs.org
+
+Communication channels
+----------------------
+
+We have a *Matrix Space* `#aio-libs-space:matrix.org
+<https://matrix.to/#/%23aio-libs-space:matrix.org>`_ which is
+also accessible via Gitter.
+
+License
+-------
+
+``frozenlist`` is offered under the Apache 2 license.
+
+Source code
+-----------
+
+The project is hosted on GitHub_
+
+Please file an issue in the `bug tracker
+<https://github.com/aio-libs/frozenlist/issues>`_ if you have found a bug
+or have some suggestions to improve the library.
+
+.. _GitHub: https://github.com/aio-libs/frozenlist
+
+=========
+Changelog
+=========
+
+..
+ You should *NOT* be adding new change log entries to this file, this
+ file is managed by towncrier. You *may* edit previous change logs to
+ fix problems like typo corrections or such.
+ To add a new change log entry, please see
+ https://pip.pypa.io/en/latest/development/contributing/#news-entries
+ we named the news folder "changes".
+
+ WARNING: Don't drop the next directive!
+
+.. towncrier release notes start
+
+v1.8.0
+======
+
+*(2025-10-05)*
+
+
+Contributor-facing changes
+--------------------------
+
+- The ``reusable-cibuildwheel.yml`` workflow has been refactored to
+ be more generic and ``ci-cd.yml`` now holds all the configuration
+ toggles -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#668 <https://github.com/aio-libs/frozenlist/issues/668>`__.
+
+- When building wheels, the source distribution is now passed directly
+ to the ``cibuildwheel`` invocation -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#669 <https://github.com/aio-libs/frozenlist/issues/669>`__.
+
+- Builds and tests have been added to
+ ``ci-cd.yml`` for arm64 Windows wheels -- by `@finnagin <https://github.com/sponsors/finnagin>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#677 <https://github.com/aio-libs/frozenlist/issues/677>`__.
+
+- Started building wheels for CPython 3.14 -- by `@kumaraditya303 <https://github.com/sponsors/kumaraditya303>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#681 <https://github.com/aio-libs/frozenlist/issues/681>`__, `#682 <https://github.com/aio-libs/frozenlist/issues/682>`__.
+
+- Removed ``--config-settings=pure-python=false`` from ``requirements/dev.txt``.
+ Developers on CPython still get accelerated builds by default. To explicitly build
+ a pure Python wheel, use ``pip install -e . --config-settings=pure-python=true``
+ -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#687 <https://github.com/aio-libs/frozenlist/issues/687>`__.
+
+
+----
+
+
+v1.7.0
+======
+
+*(2025-06-09)*
+
+
+Features
+--------
+
+- Added deepcopy support to FrozenList -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#659 <https://github.com/aio-libs/frozenlist/issues/659>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Fixed an issue where ``frozenlist`` binary wheels would be built with debugging symbols and line tracing enabled, which significantly impacted performance. Line tracing is now disabled by default and can only be enabled explicitly -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ This change ensures that production builds are optimized for performance. Developers who need line tracing for debugging purposes can still enable it by:
+
+ 1. Setting the ``FROZENLIST_CYTHON_TRACING`` environment variable
+ 2. Using the ``--config-setting=with-cython-tracing=true`` option with pip
+
+ *Related issues and pull requests on GitHub:*
+ `#660 <https://github.com/aio-libs/frozenlist/issues/660>`__.
+
+- Enabled ``PIP_CONSTRAINT`` environment variable in the build configuration to ensure the pinned Cython version from ``requirements/cython.txt`` is used during wheel builds.
+
+ *Related issues and pull requests on GitHub:*
+ `#661 <https://github.com/aio-libs/frozenlist/issues/661>`__.
+
+
+----
+
+
+v1.6.2
+======
+
+*(2025-06-03)*
+
+
+No significant changes.
+
+
+----
+
+
+v1.6.1
+======
+
+*(2025-06-02)*
+
+
+Bug fixes
+---------
+
+- Correctly use ``cimport`` for including ``PyBool_FromLong`` -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#653 <https://github.com/aio-libs/frozenlist/issues/653>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Exclude ``_frozenlist.cpp`` from bdists/wheels -- by `@musicinmybrain <https://github.com/sponsors/musicinmybrain>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#649 <https://github.com/aio-libs/frozenlist/issues/649>`__.
+
+- Updated to use Cython 3.1 universally across the build path -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#654 <https://github.com/aio-libs/frozenlist/issues/654>`__.
+
+
+----
+
+
+v1.6.0
+======
+
+*(2025-04-17)*
+
+
+Bug fixes
+---------
+
+- Stopped implicitly allowing the use of Cython pre-release versions when
+ building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and
+ `@markgreene74 <https://github.com/sponsors/markgreene74>`__.
+
+ *Related commits on GitHub:*
+ `41591f2 <https://github.com/aio-libs/frozenlist/commit/41591f2>`__.
+
+
+Features
+--------
+
+- Implemented support for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#618 <https://github.com/aio-libs/frozenlist/issues/618>`__.
+
+- Started building armv7l wheels -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#642 <https://github.com/aio-libs/frozenlist/issues/642>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Stopped implicitly allowing the use of Cython pre-release versions when
+ building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and
+ `@markgreene74 <https://github.com/sponsors/markgreene74>`__.
+
+ *Related commits on GitHub:*
+ `41591f2 <https://github.com/aio-libs/frozenlist/commit/41591f2>`__.
+
+- Started building wheels for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#618 <https://github.com/aio-libs/frozenlist/issues/618>`__.
+
+- The packaging metadata switched to including an SPDX license identifier introduced in `PEP 639 <https://peps.python.org/pep-639>`__ -- by `@cdce8p <https://github.com/sponsors/cdce8p>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#639 <https://github.com/aio-libs/frozenlist/issues/639>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- GitHub Actions CI/CD is now configured to manage caching pip-ecosystem
+ dependencies using `re-actors/cache-python-deps`_ -- an action by
+ `@webknjaz <https://github.com/sponsors/webknjaz>`__ that takes into account ABI stability and the exact
+ version of Python runtime.
+
+ .. _`re-actors/cache-python-deps`:
+ https://github.com/marketplace/actions/cache-python-deps
+
+ *Related issues and pull requests on GitHub:*
+ `#633 <https://github.com/aio-libs/frozenlist/issues/633>`__.
+
+- Organized dependencies into test and lint dependencies so that no
+ unnecessary ones are installed during CI runs -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#636 <https://github.com/aio-libs/frozenlist/issues/636>`__.
+
+
+----
+
+
+1.5.0 (2024-10-22)
+==================
+
+Bug fixes
+---------
+
+- An incorrect signature of the ``__class_getitem__`` class method
+ has been fixed, adding a missing ``class_item`` argument under
+ Python 3.8 and older.
+
+ This change also improves the code coverage of this method that
+ was previously missing -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#567 <https://github.com/aio-libs/frozenlist/issues/567>`__, `#571 <https://github.com/aio-libs/frozenlist/issues/571>`__.
+
+
+Improved documentation
+----------------------
+
+- Rendered issue, PR, and commit links now lead to
+ ``frozenlist``'s repo instead of ``yarl``'s repo.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#573 <https://github.com/aio-libs/frozenlist/issues/573>`__.
+
+- On the ``Contributing docs`` page,
+ a link to the ``Towncrier philosophy`` has been fixed.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#574 <https://github.com/aio-libs/frozenlist/issues/574>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- A name of a temporary building directory now reflects
+ that it's related to ``frozenlist``, not ``yarl``.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#573 <https://github.com/aio-libs/frozenlist/issues/573>`__.
+
+- Declared Python 3.13 supported officially in the distribution package metadata.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#595 <https://github.com/aio-libs/frozenlist/issues/595>`__.
+
+
+----
+
+
+1.4.1 (2023-12-15)
+==================
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Declared Python 3.12 and PyPy 3.8-3.10 supported officially
+ in the distribution package metadata.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#553 <https://github.com/aio-libs/frozenlist/issues/553>`__.
+
+- Replaced the packaging is replaced from an old-fashioned ``setup.py`` to an
+ in-tree `PEP 517 <https://peps.python.org/pep-517>`__ build backend -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ Whenever the end-users or downstream packagers need to build ``frozenlist``
+ from source (a Git checkout or an sdist), they may pass a ``config_settings``
+ flag ``pure-python``. If this flag is not set, a C-extension will be built
+ and included into the distribution.
+
+ Here is how this can be done with ``pip``:
+
+ .. code-block:: console
+
+ $ python3 -m pip install . --config-settings=pure-python=
+
+ This will also work with ``-e | --editable``.
+
+ The same can be achieved via ``pypa/build``:
+
+ .. code-block:: console
+
+ $ python3 -m build --config-setting=pure-python=
+
+ Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source
+ directly, as opposed to building an ``sdist`` and then building from it.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#560 <https://github.com/aio-libs/frozenlist/issues/560>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- It is now possible to request line tracing in Cython builds using the
+ ``with-cython-tracing`` `PEP 517 <https://peps.python.org/pep-517>`__ config setting
+ -- `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ This can be used in CI and development environment to measure coverage
+ on Cython modules, but is not normally useful to the end-users or
+ downstream packagers.
+
+ Here's a usage example:
+
+ .. code-block:: console
+
+ $ python3 -Im pip install . --config-settings=with-cython-tracing=true
+
+ For editable installs, this setting is on by default. Otherwise, it's
+ off unless requested explicitly.
+
+ The following produces C-files required for the Cython coverage
+ plugin to map the measurements back to the PYX-files:
+
+ .. code-block:: console
+
+ $ python -Im pip install -e .
+
+ Alternatively, the ``FROZENLIST_CYTHON_TRACING=1`` environment variable
+ can be set to do the same as the `PEP 517 <https://peps.python.org/pep-517>`__ config setting.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#560 <https://github.com/aio-libs/frozenlist/issues/560>`__.
+
+- Coverage collection has been implemented for the Cython modules
+ -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ It will also be reported to Codecov from any non-release CI jobs.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#561 <https://github.com/aio-libs/frozenlist/issues/561>`__.
+
+- A step-by-step ``Release Guide`` guide has
+ been added, describing how to release *frozenlist* -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ This is primarily targeting the maintainers.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#563 <https://github.com/aio-libs/frozenlist/issues/563>`__.
+
+- Detailed ``Contributing Guidelines`` on
+ authoring the changelog fragments have been published in the
+ documentation -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+
+ *Related issues and pull requests on GitHub:*
+ `#564 <https://github.com/aio-libs/frozenlist/issues/564>`__.
+
+
+----
+
+
+1.4.0 (2023-07-12)
+==================
+
+The published source distribution package became buildable
+under Python 3.12.
+
+
+----
+
+
+Bugfixes
+--------
+
+- Removed an unused ``typing.Tuple`` import
+ `#411 <https://github.com/aio-libs/frozenlist/issues/411>`_
+
+
+Deprecations and Removals
+-------------------------
+
+- Dropped Python 3.7 support.
+ `#413 <https://github.com/aio-libs/frozenlist/issues/413>`_
+
+
+Misc
+----
+
+- `#410 <https://github.com/aio-libs/frozenlist/issues/410>`_, `#433 <https://github.com/aio-libs/frozenlist/issues/433>`_
+
+
+----
+
+
+1.3.3 (2022-11-08)
+==================
+
+- Fixed CI runs when creating a new release, where new towncrier versions
+ fail when the current version section is already present.
+
+
+----
+
+
+1.3.2 (2022-11-08)
+==================
+
+Misc
+----
+
+- Updated the CI runs to better check for test results and to avoid deprecated syntax. `#327 <https://github.com/aio-libs/frozenlist/issues/327>`_
+
+
+----
+
+
+1.3.1 (2022-08-02)
+==================
+
+The published source distribution package became buildable
+under Python 3.11.
+
+
+----
+
+
+1.3.0 (2022-01-18)
+==================
+
+Bugfixes
+--------
+
+- Do not install C sources with binary distributions.
+ `#250 <https://github.com/aio-libs/frozenlist/issues/250>`_
+
+
+Deprecations and Removals
+-------------------------
+
+- Dropped Python 3.6 support
+ `#274 <https://github.com/aio-libs/frozenlist/issues/274>`_
+
+
+----
+
+
+1.2.0 (2021-10-16)
+==================
+
+Features
+--------
+
+- ``FrozenList`` now supports being used as a generic type as per PEP 585, e.g. ``frozen_int_list: FrozenList[int]`` (requires Python 3.9 or newer).
+ `#172 <https://github.com/aio-libs/frozenlist/issues/172>`_
+- Added support for Python 3.10.
+ `#227 <https://github.com/aio-libs/frozenlist/issues/227>`_
+- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes.
+ `#227 <https://github.com/aio-libs/frozenlist/issues/227>`_
+- Started shipping platform-specific arm64 wheels for Apple Silicon.
+ `#227 <https://github.com/aio-libs/frozenlist/issues/227>`_
+
+
+----
+
+
+1.1.1 (2020-11-14)
+==================
+
+Bugfixes
+--------
+
+- Provide x86 Windows wheels.
+ `#169 <https://github.com/aio-libs/frozenlist/issues/169>`_
+
+
+----
+
+
+1.1.0 (2020-10-13)
+==================
+
+Features
+--------
+
+- Add support for hashing of a frozen list.
+ `#136 <https://github.com/aio-libs/frozenlist/issues/136>`_
+
+- Support Python 3.8 and 3.9.
+
+- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on
+ Linux as well as ``x86_64``.
+
+
+----
+
+
+1.0.0 (2019-11-09)
+==================
+
+Deprecations and Removals
+-------------------------
+
+- Dropped support for Python 3.5; only 3.6, 3.7 and 3.8 are supported going forward.
+ `#24 <https://github.com/aio-libs/frozenlist/issues/24>`_
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/RECORD"
new file mode 100644
index 0000000..8db1e82
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/RECORD"
@@ -0,0 +1,12 @@
+frozenlist-1.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+frozenlist-1.8.0.dist-info/METADATA,sha256=Qnius9GPH4t-avW2dxupqPMnASpC1kUJ-dQIe1X3GpQ,21005
+frozenlist-1.8.0.dist-info/RECORD,,
+frozenlist-1.8.0.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101
+frozenlist-1.8.0.dist-info/licenses/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
+frozenlist-1.8.0.dist-info/top_level.txt,sha256=jivtxsPXA3nK3WBWW2LW5Mtu_GHt8UZA13NeCs2cKuA,11
+frozenlist/__init__.py,sha256=xAIE2u9ncAbjATGIPfno_OJfe8AQ-1h7z_uc73dYsEA,2108
+frozenlist/__init__.pyi,sha256=vMEoES1xGegPtVXoCi9XydEeHsyuIq-KdeXwP5PdsaA,1470
+frozenlist/__pycache__/__init__.cpython-312.pyc,,
+frozenlist/_frozenlist.cp312-win_amd64.pyd,sha256=rAPOTWexA69W76WkNpVMbF5NoOr-yHijEpjyuM6cb5Y,69632
+frozenlist/_frozenlist.pyx,sha256=t-aGjuEiVt_MZPBJ0RnraavVmPBK6arz3i48ZvXuYsU,3708
+frozenlist/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/WHEEL"
new file mode 100644
index 0000000..10ac2c2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..7082a2d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/licenses/LICENSE"
@@ -0,0 +1,201 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/top_level.txt"
new file mode 100644
index 0000000..52f13fc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist-1.8.0.dist-info/top_level.txt"
@@ -0,0 +1 @@
+frozenlist
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.py"
new file mode 100644
index 0000000..41c8595
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.py"
@@ -0,0 +1,86 @@
+import os
+import types
+from collections.abc import MutableSequence
+from functools import total_ordering
+
+__version__ = "1.8.0"
+
+__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...]
+
+
+NO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS")) # type: bool
+
+
+@total_ordering
+class FrozenList(MutableSequence):
+ __slots__ = ("_frozen", "_items")
+ __class_getitem__ = classmethod(types.GenericAlias)
+
+ def __init__(self, items=None):
+ self._frozen = False
+ if items is not None:
+ items = list(items)
+ else:
+ items = []
+ self._items = items
+
+ @property
+ def frozen(self):
+ return self._frozen
+
+ def freeze(self):
+ self._frozen = True
+
+ def __getitem__(self, index):
+ return self._items[index]
+
+ def __setitem__(self, index, value):
+ if self._frozen:
+ raise RuntimeError("Cannot modify frozen list.")
+ self._items[index] = value
+
+ def __delitem__(self, index):
+ if self._frozen:
+ raise RuntimeError("Cannot modify frozen list.")
+ del self._items[index]
+
+ def __len__(self):
+ return self._items.__len__()
+
+ def __iter__(self):
+ return self._items.__iter__()
+
+ def __reversed__(self):
+ return self._items.__reversed__()
+
+ def __eq__(self, other):
+ return list(self) == other
+
+ def __le__(self, other):
+ return list(self) <= other
+
+ def insert(self, pos, item):
+ if self._frozen:
+ raise RuntimeError("Cannot modify frozen list.")
+ self._items.insert(pos, item)
+
+ def __repr__(self):
+ return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"
+
+ def __hash__(self):
+ if self._frozen:
+ return hash(tuple(self))
+ else:
+ raise RuntimeError("Cannot hash unfrozen list.")
+
+
+PyFrozenList = FrozenList
+
+
+if not NO_EXTENSIONS:
+ try:
+ from ._frozenlist import FrozenList as CFrozenList # type: ignore
+ except ImportError: # pragma: no cover
+ pass
+ else:
+ FrozenList = CFrozenList # type: ignore
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.pyi" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.pyi"
new file mode 100644
index 0000000..ae803ef
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/__init__.pyi"
@@ -0,0 +1,47 @@
+from typing import (
+ Generic,
+ Iterable,
+ Iterator,
+ List,
+ MutableSequence,
+ Optional,
+ TypeVar,
+ Union,
+ overload,
+)
+
+_T = TypeVar("_T")
+_Arg = Union[List[_T], Iterable[_T]]
+
+class FrozenList(MutableSequence[_T], Generic[_T]):
+ def __init__(self, items: Optional[_Arg[_T]] = None) -> None: ...
+ @property
+ def frozen(self) -> bool: ...
+ def freeze(self) -> None: ...
+ @overload
+ def __getitem__(self, i: int) -> _T: ...
+ @overload
+ def __getitem__(self, s: slice) -> FrozenList[_T]: ...
+ @overload
+ def __setitem__(self, i: int, o: _T) -> None: ...
+ @overload
+ def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
+ @overload
+ def __delitem__(self, i: int) -> None: ...
+ @overload
+ def __delitem__(self, i: slice) -> None: ...
+ def __len__(self) -> int: ...
+ def __iter__(self) -> Iterator[_T]: ...
+ def __reversed__(self) -> Iterator[_T]: ...
+ def __eq__(self, other: object) -> bool: ...
+ def __le__(self, other: FrozenList[_T]) -> bool: ...
+ def __ne__(self, other: object) -> bool: ...
+ def __lt__(self, other: FrozenList[_T]) -> bool: ...
+ def __ge__(self, other: FrozenList[_T]) -> bool: ...
+ def __gt__(self, other: FrozenList[_T]) -> bool: ...
+ def insert(self, pos: int, item: _T) -> None: ...
+ def __repr__(self) -> str: ...
+ def __hash__(self) -> int: ...
+
+# types for C accelerators are the same
+CFrozenList = PyFrozenList = FrozenList
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..2be3e19
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.pyx" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.pyx"
new file mode 100644
index 0000000..a82d8c8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/_frozenlist.pyx"
@@ -0,0 +1,148 @@
+# cython: freethreading_compatible = True
+# distutils: language = c++
+
+from cpython.bool cimport PyBool_FromLong
+from libcpp.atomic cimport atomic
+
+import copy
+import types
+from collections.abc import MutableSequence
+
+
+cdef class FrozenList:
+ __class_getitem__ = classmethod(types.GenericAlias)
+
+ cdef atomic[bint] _frozen
+ cdef list _items
+
+ def __init__(self, items=None):
+ self._frozen.store(False)
+ if items is not None:
+ items = list(items)
+ else:
+ items = []
+ self._items = items
+
+ @property
+ def frozen(self):
+ return PyBool_FromLong(self._frozen.load())
+
+ cdef object _check_frozen(self):
+ if self._frozen.load():
+ raise RuntimeError("Cannot modify frozen list.")
+
+ cdef inline object _fast_len(self):
+ return len(self._items)
+
+ def freeze(self):
+ self._frozen.store(True)
+
+ def __getitem__(self, index):
+ return self._items[index]
+
+ def __setitem__(self, index, value):
+ self._check_frozen()
+ self._items[index] = value
+
+ def __delitem__(self, index):
+ self._check_frozen()
+ del self._items[index]
+
+ def __len__(self):
+ return self._fast_len()
+
+ def __iter__(self):
+ return self._items.__iter__()
+
+ def __reversed__(self):
+ return self._items.__reversed__()
+
+ def __richcmp__(self, other, op):
+ if op == 0: # <
+ return list(self) < other
+ if op == 1: # <=
+ return list(self) <= other
+ if op == 2: # ==
+ return list(self) == other
+ if op == 3: # !=
+ return list(self) != other
+ if op == 4: # >
+ return list(self) > other
+ if op == 5: # =>
+ return list(self) >= other
+
+ def insert(self, pos, item):
+ self._check_frozen()
+ self._items.insert(pos, item)
+
+ def __contains__(self, item):
+ return item in self._items
+
+ def __iadd__(self, items):
+ self._check_frozen()
+ self._items += list(items)
+ return self
+
+ def index(self, item):
+ return self._items.index(item)
+
+ def remove(self, item):
+ self._check_frozen()
+ self._items.remove(item)
+
+ def clear(self):
+ self._check_frozen()
+ self._items.clear()
+
+ def extend(self, items):
+ self._check_frozen()
+ self._items += list(items)
+
+ def reverse(self):
+ self._check_frozen()
+ self._items.reverse()
+
+ def pop(self, index=-1):
+ self._check_frozen()
+ return self._items.pop(index)
+
+ def append(self, item):
+ self._check_frozen()
+ return self._items.append(item)
+
+ def count(self, item):
+ return self._items.count(item)
+
+ def __repr__(self):
+ return '<FrozenList(frozen={}, {!r})>'.format(self._frozen.load(),
+ self._items)
+
+ def __hash__(self):
+ if self._frozen.load():
+ return hash(tuple(self._items))
+ else:
+ raise RuntimeError("Cannot hash unfrozen list.")
+
+ def __deepcopy__(self, memo):
+ cdef FrozenList new_list
+ obj_id = id(self)
+
+ # Return existing copy if already processed (circular reference)
+ if obj_id in memo:
+ return memo[obj_id]
+
+ # Create new instance and register immediately
+ new_list = self.__class__([])
+ memo[obj_id] = new_list
+
+ # Deep copy items
+ new_list._items[:] = [copy.deepcopy(item, memo) for item in self._items]
+
+ # Preserve frozen state
+ if self._frozen.load():
+ new_list.freeze()
+
+ return new_list
+
+
+MutableSequence.register(FrozenList)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/py.typed"
new file mode 100644
index 0000000..f5642f7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/frozenlist/py.typed"
@@ -0,0 +1 @@
+Marker
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/METADATA"
new file mode 100644
index 0000000..7a4a4b7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/METADATA"
@@ -0,0 +1,209 @@
+Metadata-Version: 2.4
+Name: idna
+Version: 3.11
+Summary: Internationalized Domain Names in Applications (IDNA)
+Author-email: Kim Davies <kim+pypi@gumleaf.org>
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-Expression: BSD-3-Clause
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+License-File: LICENSE.md
+Requires-Dist: ruff >= 0.6.2 ; extra == "all"
+Requires-Dist: mypy >= 1.11.2 ; extra == "all"
+Requires-Dist: pytest >= 8.3.2 ; extra == "all"
+Requires-Dist: flake8 >= 7.1.1 ; extra == "all"
+Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst
+Project-URL: Issue tracker, https://github.com/kjd/idna/issues
+Project-URL: Source, https://github.com/kjd/idna
+Provides-Extra: all
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for `Internationalized Domain Names in
+Applications (IDNA) <https://tools.ietf.org/html/rfc5891>`_
+and `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_.
+
+The latest versions of these standards supplied here provide
+more comprehensive language coverage and reduce the potential of
+allowing domains with known security vulnerabilities. This library
+is a suitable replacement for the “encodings.idna”
+module that comes with the Python standard library, but which
+only supports an older superseded IDNA specification from 2003.
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+
+Installation
+------------
+
+This package is available for installation from PyPI via the
+typical mechanisms, such as:
+
+.. code-block:: bash
+
+ $ python3 -m pip install idna
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a
+domain name argument and perform a conversion to ASCII compatible encoding
+(known as A-labels), or to Unicode strings (known as U-labels)
+respectively.
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('ドメイン.テスト')
+ b'xn--eckwd4c7c.xn--zckzah'
+ >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+ ドメイン.テスト
+
+Conversions can be applied at a per-label basis using the ``ulabel`` or
+``alabel`` functions if necessary:
+
+.. code-block:: pycon
+
+ >>> idna.alabel('测试')
+ b'xn--0zwm56d'
+
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+This library provides support for `Unicode IDNA Compatibility
+Processing <https://unicode.org/reports/tr46/>`_ which normalizes input from
+different potential ways a user may input a domain prior to performing the IDNA
+conversion operations. This functionality, known as a
+`mapping <https://tools.ietf.org/html/rfc5895>`_, is considered by the
+specification to be a local user-interface issue distinct from IDNA
+conversion functionality.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN
+CAPITAL LETTER K* is not allowed (nor are capital letters in general).
+UTS 46 will convert this into lower case prior to applying the IDNA
+conversion.
+
+.. code-block:: pycon
+
+ >>> import idna
+ >>> idna.encode('Königsgäßchen')
+ ...
+ idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+ >>> idna.encode('Königsgäßchen', uts46=True)
+ b'xn--knigsgchen-b4a3dun'
+ >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+ königsgäßchen
+
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification
+should raise an exception derived from the ``idna.IDNAError`` base
+class.
+
+More specific exceptions that may be generated as ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint`` when
+a specific codepoint is an illegal character in an IDN label (i.e.
+INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
+illegal based on its position in the string (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied.)
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance. These tables are derived from computing against
+eligibility criteria in the respective standards using the command-line
+script ``tools/idna-data``.
+
+This tool will fetch relevant codepoint data from the Unicode repository
+and perform the required calculations to identify eligibility. There are
+three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and
+ ``uts46data.py``, the pre-calculated lookup tables used for IDNA and
+ UTS 46 conversions. Implementers who wish to track this library against
+ a different Unicode version may use this tool to manually generate a
+ different version of the ``idnadata.py`` and ``uts46data.py`` files.
+
+* ``idna-data make-table``. Generate a table of the IDNA disposition
+ (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix
+ B.1 of RFC 5892 and the pre-computed tables published by `IANA
+ <https://www.iana.org/>`_.
+
+* ``idna-data U+0061``. Prints debugging output on the various
+ properties associated with an individual Unicode codepoint (in this
+ case, U+0061), that are used to assess the IDNA and UTS 46 status of a
+ codepoint. This is helpful in debugging or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data
+-h``. Most notably, the ``--version`` argument allows the specification
+of the version of Unicode to be used in computing the table data. For
+example, ``idna-data --version 9.0.0 make-libdata`` will generate
+library data against Unicode 9.0.0.
+
+
+Additional Notes
+----------------
+
+* **Packages**. The latest tagged release version is published in the
+ `Python Package Index <https://pypi.org/project/idna/>`_.
+
+* **Version support**. This library supports Python 3.8 and higher.
+ As this library serves as a low-level toolkit for a variety of
+ applications, many of which strive for broad compatibility with older
+ Python versions, there is no rush to remove older interpreter support.
+ Support for older versions are likely to be removed from new releases
+ as automated tests can no longer easily be run, i.e. once the Python
+ version is officially end-of-life.
+
+* **Testing**. The library has a test suite based on each rule of the
+ IDNA specification, as well as tests that are provided as part of the
+ Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing
+ <https://unicode.org/reports/tr46/>`_.
+
+* **Emoji**. It is an occasional request to support emoji domains in
+ this library. Encoding of symbols like emoji is expressly prohibited by
+ the technical standard IDNA 2008 and emoji domains are broadly phased
+ out across the domain industry due to associated security risks. For
+ now, applications that need to support these non-compliant labels
+ may wish to consider trying the encode/decode operation in this library
+ first, and then falling back to using `encodings.idna`. See `the Github
+ project <https://github.com/kjd/idna/issues/18>`_ for more discussion.
+
+* **Transitional processing**. Unicode 16.0.0 removed transitional
+ processing so the `transitional` argument for the encode() method
+ no longer has any effect and will be removed at a later date.
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/RECORD"
new file mode 100644
index 0000000..1203f10
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/RECORD"
@@ -0,0 +1,22 @@
+idna-3.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+idna-3.11.dist-info/METADATA,sha256=fCwSww9SuiN8TIHllFSASUQCW55hAs8dzKnr9RaEEbA,8378
+idna-3.11.dist-info/RECORD,,
+idna-3.11.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+idna-3.11.dist-info/licenses/LICENSE.md,sha256=t6M2q_OwThgOwGXN0W5wXQeeHMehT5EKpukYfza5zYc,1541
+idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868
+idna/__pycache__/__init__.cpython-312.pyc,,
+idna/__pycache__/codec.cpython-312.pyc,,
+idna/__pycache__/compat.cpython-312.pyc,,
+idna/__pycache__/core.cpython-312.pyc,,
+idna/__pycache__/idnadata.cpython-312.pyc,,
+idna/__pycache__/intranges.cpython-312.pyc,,
+idna/__pycache__/package_data.cpython-312.pyc,,
+idna/__pycache__/uts46data.cpython-312.pyc,,
+idna/codec.py,sha256=M2SGWN7cs_6B32QmKTyTN6xQGZeYQgQ2wiX3_DR6loE,3438
+idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316
+idna/core.py,sha256=P26_XVycuMTZ1R2mNK1ZREVzM5mvTzdabBXfyZVU1Lc,13246
+idna/idnadata.py,sha256=SG8jhaGE53iiD6B49pt2pwTv_UvClciWE-N54oR2p4U,79623
+idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898
+idna/package_data.py,sha256=_CUavOxobnbyNG2FLyHoN8QHP3QM9W1tKuw7eq9QwBk,21
+idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+idna/uts46data.py,sha256=H9J35VkD0F9L9mKOqjeNGd2A-Va6FlPoz6Jz4K7h-ps,243725
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/WHEEL"
new file mode 100644
index 0000000..d8b9936
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/WHEEL"
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.12.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/licenses/LICENSE.md" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/licenses/LICENSE.md"
new file mode 100644
index 0000000..256ba90
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna-3.11.dist-info/licenses/LICENSE.md"
@@ -0,0 +1,31 @@
+BSD 3-Clause License
+
+Copyright (c) 2013-2025, Kim Davies and contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/__init__.py"
new file mode 100644
index 0000000..cfdc030
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/__init__.py"
@@ -0,0 +1,45 @@
+from .core import (
+ IDNABidiError,
+ IDNAError,
+ InvalidCodepoint,
+ InvalidCodepointContext,
+ alabel,
+ check_bidi,
+ check_hyphen_ok,
+ check_initial_combiner,
+ check_label,
+ check_nfc,
+ decode,
+ encode,
+ ulabel,
+ uts46_remap,
+ valid_contextj,
+ valid_contexto,
+ valid_label_length,
+ valid_string_length,
+)
+from .intranges import intranges_contain
+from .package_data import __version__
+
+__all__ = [
+ "__version__",
+ "IDNABidiError",
+ "IDNAError",
+ "InvalidCodepoint",
+ "InvalidCodepointContext",
+ "alabel",
+ "check_bidi",
+ "check_hyphen_ok",
+ "check_initial_combiner",
+ "check_label",
+ "check_nfc",
+ "decode",
+ "encode",
+ "intranges_contain",
+ "ulabel",
+ "uts46_remap",
+ "valid_contextj",
+ "valid_contexto",
+ "valid_label_length",
+ "valid_string_length",
+]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/codec.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/codec.py"
new file mode 100644
index 0000000..cbc2e4f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/codec.py"
@@ -0,0 +1,122 @@
+import codecs
+import re
+from typing import Any, Optional, Tuple
+
+from .core import IDNAError, alabel, decode, encode, ulabel
+
+_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
+
+
+class Codec(codecs.Codec):
+ def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]:
+ if errors != "strict":
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
+
+ if not data:
+ return b"", 0
+
+ return encode(data), len(data)
+
+ def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]:
+ if errors != "strict":
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
+
+ if not data:
+ return "", 0
+
+ return decode(data), len(data)
+
+
+class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
+ def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
+ if errors != "strict":
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
+
+ if not data:
+ return b"", 0
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = b""
+ if labels:
+ if not labels[-1]:
+ trailing_dot = b"."
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = b"."
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(alabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ # Join with U+002E
+ result_bytes = b".".join(result) + trailing_dot
+ size += len(trailing_dot)
+ return result_bytes, size
+
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]:
+ if errors != "strict":
+ raise IDNAError('Unsupported error handling "{}"'.format(errors))
+
+ if not data:
+ return ("", 0)
+
+ if not isinstance(data, str):
+ data = str(data, "ascii")
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = ""
+ if labels:
+ if not labels[-1]:
+ trailing_dot = "."
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = "."
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(ulabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ result_str = ".".join(result) + trailing_dot
+ size += len(trailing_dot)
+ return (result_str, size)
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+ pass
+
+
+class StreamReader(Codec, codecs.StreamReader):
+ pass
+
+
+def search_function(name: str) -> Optional[codecs.CodecInfo]:
+ if name != "idna2008":
+ return None
+ return codecs.CodecInfo(
+ name=name,
+ encode=Codec().encode,
+ decode=Codec().decode, # type: ignore
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamwriter=StreamWriter,
+ streamreader=StreamReader,
+ )
+
+
+codecs.register(search_function)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/compat.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/compat.py"
new file mode 100644
index 0000000..1df9f2a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/compat.py"
@@ -0,0 +1,15 @@
+from typing import Any, Union
+
+from .core import decode, encode
+
+
+def ToASCII(label: str) -> bytes:
+ return encode(label)
+
+
+def ToUnicode(label: Union[bytes, bytearray]) -> str:
+ return decode(label)
+
+
+def nameprep(s: Any) -> None:
+ raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/core.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/core.py"
new file mode 100644
index 0000000..8177bf7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/core.py"
@@ -0,0 +1,437 @@
+import bisect
+import re
+import unicodedata
+from typing import Optional, Union
+
+from . import idnadata
+from .intranges import intranges_contain
+
+_virama_combining_class = 9
+_alabel_prefix = b"xn--"
+_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
+
+
+class IDNAError(UnicodeError):
+ """Base exception for all IDNA-encoding related problems"""
+
+ pass
+
+
+class IDNABidiError(IDNAError):
+ """Exception when bidirectional requirements are not satisfied"""
+
+ pass
+
+
+class InvalidCodepoint(IDNAError):
+ """Exception when a disallowed or unallocated codepoint is used"""
+
+ pass
+
+
+class InvalidCodepointContext(IDNAError):
+ """Exception when the codepoint is not valid in the context it is used"""
+
+ pass
+
+
+def _combining_class(cp: int) -> int:
+ v = unicodedata.combining(chr(cp))
+ if v == 0:
+ if not unicodedata.name(chr(cp)):
+ raise ValueError("Unknown character in unicodedata")
+ return v
+
+
+def _is_script(cp: str, script: str) -> bool:
+ return intranges_contain(ord(cp), idnadata.scripts[script])
+
+
+def _punycode(s: str) -> bytes:
+ return s.encode("punycode")
+
+
+def _unot(s: int) -> str:
+ return "U+{:04X}".format(s)
+
+
+def valid_label_length(label: Union[bytes, str]) -> bool:
+ if len(label) > 63:
+ return False
+ return True
+
+
+def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool:
+ if len(label) > (254 if trailing_dot else 253):
+ return False
+ return True
+
+
+def check_bidi(label: str, check_ltr: bool = False) -> bool:
+ # Bidi rules should only be applied if string contains RTL characters
+ bidi_label = False
+ for idx, cp in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+ if direction == "":
+ # String likely comes from a newer version of Unicode
+ raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx))
+ if direction in ["R", "AL", "AN"]:
+ bidi_label = True
+ if not bidi_label and not check_ltr:
+ return True
+
+ # Bidi rule 1
+ direction = unicodedata.bidirectional(label[0])
+ if direction in ["R", "AL"]:
+ rtl = True
+ elif direction == "L":
+ rtl = False
+ else:
+ raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label)))
+
+ valid_ending = False
+ number_type: Optional[str] = None
+ for idx, cp in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+
+ if rtl:
+ # Bidi rule 2
+ if direction not in [
+ "R",
+ "AL",
+ "AN",
+ "EN",
+ "ES",
+ "CS",
+ "ET",
+ "ON",
+ "BN",
+ "NSM",
+ ]:
+ raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx))
+ # Bidi rule 3
+ if direction in ["R", "AL", "EN", "AN"]:
+ valid_ending = True
+ elif direction != "NSM":
+ valid_ending = False
+ # Bidi rule 4
+ if direction in ["AN", "EN"]:
+ if not number_type:
+ number_type = direction
+ else:
+ if number_type != direction:
+ raise IDNABidiError("Can not mix numeral types in a right-to-left label")
+ else:
+ # Bidi rule 5
+ if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]:
+ raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx))
+ # Bidi rule 6
+ if direction in ["L", "EN"]:
+ valid_ending = True
+ elif direction != "NSM":
+ valid_ending = False
+
+ if not valid_ending:
+ raise IDNABidiError("Label ends with illegal codepoint directionality")
+
+ return True
+
+
+def check_initial_combiner(label: str) -> bool:
+ if unicodedata.category(label[0])[0] == "M":
+ raise IDNAError("Label begins with an illegal combining character")
+ return True
+
+
+def check_hyphen_ok(label: str) -> bool:
+ if label[2:4] == "--":
+ raise IDNAError("Label has disallowed hyphens in 3rd and 4th position")
+ if label[0] == "-" or label[-1] == "-":
+ raise IDNAError("Label must not start or end with a hyphen")
+ return True
+
+
+def check_nfc(label: str) -> None:
+ if unicodedata.normalize("NFC", label) != label:
+ raise IDNAError("Label must be in Normalization Form C")
+
+
+def valid_contextj(label: str, pos: int) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x200C:
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+
+ ok = False
+ for i in range(pos - 1, -1, -1):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord("T"):
+ continue
+ elif joining_type in [ord("L"), ord("D")]:
+ ok = True
+ break
+ else:
+ break
+
+ if not ok:
+ return False
+
+ ok = False
+ for i in range(pos + 1, len(label)):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord("T"):
+ continue
+ elif joining_type in [ord("R"), ord("D")]:
+ ok = True
+ break
+ else:
+ break
+ return ok
+
+ if cp_value == 0x200D:
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+ return False
+
+ else:
+ return False
+
+
+def valid_contexto(label: str, pos: int, exception: bool = False) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x00B7:
+ if 0 < pos < len(label) - 1:
+ if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C:
+ return True
+ return False
+
+ elif cp_value == 0x0375:
+ if pos < len(label) - 1 and len(label) > 1:
+ return _is_script(label[pos + 1], "Greek")
+ return False
+
+ elif cp_value == 0x05F3 or cp_value == 0x05F4:
+ if pos > 0:
+ return _is_script(label[pos - 1], "Hebrew")
+ return False
+
+ elif cp_value == 0x30FB:
+ for cp in label:
+ if cp == "\u30fb":
+ continue
+ if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"):
+ return True
+ return False
+
+ elif 0x660 <= cp_value <= 0x669:
+ for cp in label:
+ if 0x6F0 <= ord(cp) <= 0x06F9:
+ return False
+ return True
+
+ elif 0x6F0 <= cp_value <= 0x6F9:
+ for cp in label:
+ if 0x660 <= ord(cp) <= 0x0669:
+ return False
+ return True
+
+ return False
+
+
+def check_label(label: Union[str, bytes, bytearray]) -> None:
+ if isinstance(label, (bytes, bytearray)):
+ label = label.decode("utf-8")
+ if len(label) == 0:
+ raise IDNAError("Empty Label")
+
+ check_nfc(label)
+ check_hyphen_ok(label)
+ check_initial_combiner(label)
+
+ for pos, cp in enumerate(label):
+ cp_value = ord(cp)
+ if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]):
+ continue
+ elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]):
+ try:
+ if not valid_contextj(label, pos):
+ raise InvalidCodepointContext(
+ "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label))
+ )
+ except ValueError:
+ raise IDNAError(
+ "Unknown codepoint adjacent to joiner {} at position {} in {}".format(
+ _unot(cp_value), pos + 1, repr(label)
+ )
+ )
+ elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]):
+ if not valid_contexto(label, pos):
+ raise InvalidCodepointContext(
+ "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label))
+ )
+ else:
+ raise InvalidCodepoint(
+ "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label))
+ )
+
+ check_bidi(label)
+
+
+def alabel(label: str) -> bytes:
+ try:
+ label_bytes = label.encode("ascii")
+ ulabel(label_bytes)
+ if not valid_label_length(label_bytes):
+ raise IDNAError("Label too long")
+ return label_bytes
+ except UnicodeEncodeError:
+ pass
+
+ check_label(label)
+ label_bytes = _alabel_prefix + _punycode(label)
+
+ if not valid_label_length(label_bytes):
+ raise IDNAError("Label too long")
+
+ return label_bytes
+
+
+def ulabel(label: Union[str, bytes, bytearray]) -> str:
+ if not isinstance(label, (bytes, bytearray)):
+ try:
+ label_bytes = label.encode("ascii")
+ except UnicodeEncodeError:
+ check_label(label)
+ return label
+ else:
+ label_bytes = bytes(label)
+
+ label_bytes = label_bytes.lower()
+ if label_bytes.startswith(_alabel_prefix):
+ label_bytes = label_bytes[len(_alabel_prefix) :]
+ if not label_bytes:
+ raise IDNAError("Malformed A-label, no Punycode eligible content found")
+ if label_bytes.decode("ascii")[-1] == "-":
+ raise IDNAError("A-label must not end with a hyphen")
+ else:
+ check_label(label_bytes)
+ return label_bytes.decode("ascii")
+
+ try:
+ label = label_bytes.decode("punycode")
+ except UnicodeError:
+ raise IDNAError("Invalid A-label")
+ check_label(label)
+ return label
+
+
+def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str:
+ """Re-map the characters in the string according to UTS46 processing."""
+ from .uts46data import uts46data
+
+ output = ""
+
+ for pos, char in enumerate(domain):
+ code_point = ord(char)
+ try:
+ uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
+ status = uts46row[1]
+ replacement: Optional[str] = None
+ if len(uts46row) == 3:
+ replacement = uts46row[2]
+ if (
+ status == "V"
+ or (status == "D" and not transitional)
+ or (status == "3" and not std3_rules and replacement is None)
+ ):
+ output += char
+ elif replacement is not None and (
+ status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional)
+ ):
+ output += replacement
+ elif status != "I":
+ raise IndexError()
+ except IndexError:
+ raise InvalidCodepoint(
+ "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain))
+ )
+
+ return unicodedata.normalize("NFC", output)
+
+
+def encode(
+ s: Union[str, bytes, bytearray],
+ strict: bool = False,
+ uts46: bool = False,
+ std3_rules: bool = False,
+ transitional: bool = False,
+) -> bytes:
+ if not isinstance(s, str):
+ try:
+ s = str(s, "ascii")
+ except UnicodeDecodeError:
+ raise IDNAError("should pass a unicode string to the function rather than a byte string.")
+ if uts46:
+ s = uts46_remap(s, std3_rules, transitional)
+ trailing_dot = False
+ result = []
+ if strict:
+ labels = s.split(".")
+ else:
+ labels = _unicode_dots_re.split(s)
+ if not labels or labels == [""]:
+ raise IDNAError("Empty domain")
+ if labels[-1] == "":
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = alabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError("Empty label")
+ if trailing_dot:
+ result.append(b"")
+ s = b".".join(result)
+ if not valid_string_length(s, trailing_dot):
+ raise IDNAError("Domain too long")
+ return s
+
+
+def decode(
+ s: Union[str, bytes, bytearray],
+ strict: bool = False,
+ uts46: bool = False,
+ std3_rules: bool = False,
+) -> str:
+ try:
+ if not isinstance(s, str):
+ s = str(s, "ascii")
+ except UnicodeDecodeError:
+ raise IDNAError("Invalid ASCII in A-label")
+ if uts46:
+ s = uts46_remap(s, std3_rules, False)
+ trailing_dot = False
+ result = []
+ if not strict:
+ labels = _unicode_dots_re.split(s)
+ else:
+ labels = s.split(".")
+ if not labels or labels == [""]:
+ raise IDNAError("Empty domain")
+ if not labels[-1]:
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = ulabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError("Empty label")
+ if trailing_dot:
+ result.append("")
+ return ".".join(result)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/idnadata.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/idnadata.py"
new file mode 100644
index 0000000..ded47ca
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/idnadata.py"
@@ -0,0 +1,4309 @@
+# This file is automatically generated by tools/idna-data
+
+__version__ = "16.0.0"
+
+scripts = {
+ "Greek": (
+ 0x37000000374,
+ 0x37500000378,
+ 0x37A0000037E,
+ 0x37F00000380,
+ 0x38400000385,
+ 0x38600000387,
+ 0x3880000038B,
+ 0x38C0000038D,
+ 0x38E000003A2,
+ 0x3A3000003E2,
+ 0x3F000000400,
+ 0x1D2600001D2B,
+ 0x1D5D00001D62,
+ 0x1D6600001D6B,
+ 0x1DBF00001DC0,
+ 0x1F0000001F16,
+ 0x1F1800001F1E,
+ 0x1F2000001F46,
+ 0x1F4800001F4E,
+ 0x1F5000001F58,
+ 0x1F5900001F5A,
+ 0x1F5B00001F5C,
+ 0x1F5D00001F5E,
+ 0x1F5F00001F7E,
+ 0x1F8000001FB5,
+ 0x1FB600001FC5,
+ 0x1FC600001FD4,
+ 0x1FD600001FDC,
+ 0x1FDD00001FF0,
+ 0x1FF200001FF5,
+ 0x1FF600001FFF,
+ 0x212600002127,
+ 0xAB650000AB66,
+ 0x101400001018F,
+ 0x101A0000101A1,
+ 0x1D2000001D246,
+ ),
+ "Han": (
+ 0x2E8000002E9A,
+ 0x2E9B00002EF4,
+ 0x2F0000002FD6,
+ 0x300500003006,
+ 0x300700003008,
+ 0x30210000302A,
+ 0x30380000303C,
+ 0x340000004DC0,
+ 0x4E000000A000,
+ 0xF9000000FA6E,
+ 0xFA700000FADA,
+ 0x16FE200016FE4,
+ 0x16FF000016FF2,
+ 0x200000002A6E0,
+ 0x2A7000002B73A,
+ 0x2B7400002B81E,
+ 0x2B8200002CEA2,
+ 0x2CEB00002EBE1,
+ 0x2EBF00002EE5E,
+ 0x2F8000002FA1E,
+ 0x300000003134B,
+ 0x31350000323B0,
+ ),
+ "Hebrew": (
+ 0x591000005C8,
+ 0x5D0000005EB,
+ 0x5EF000005F5,
+ 0xFB1D0000FB37,
+ 0xFB380000FB3D,
+ 0xFB3E0000FB3F,
+ 0xFB400000FB42,
+ 0xFB430000FB45,
+ 0xFB460000FB50,
+ ),
+ "Hiragana": (
+ 0x304100003097,
+ 0x309D000030A0,
+ 0x1B0010001B120,
+ 0x1B1320001B133,
+ 0x1B1500001B153,
+ 0x1F2000001F201,
+ ),
+ "Katakana": (
+ 0x30A1000030FB,
+ 0x30FD00003100,
+ 0x31F000003200,
+ 0x32D0000032FF,
+ 0x330000003358,
+ 0xFF660000FF70,
+ 0xFF710000FF9E,
+ 0x1AFF00001AFF4,
+ 0x1AFF50001AFFC,
+ 0x1AFFD0001AFFF,
+ 0x1B0000001B001,
+ 0x1B1200001B123,
+ 0x1B1550001B156,
+ 0x1B1640001B168,
+ ),
+}
+joining_types = {
+ 0xAD: 84,
+ 0x300: 84,
+ 0x301: 84,
+ 0x302: 84,
+ 0x303: 84,
+ 0x304: 84,
+ 0x305: 84,
+ 0x306: 84,
+ 0x307: 84,
+ 0x308: 84,
+ 0x309: 84,
+ 0x30A: 84,
+ 0x30B: 84,
+ 0x30C: 84,
+ 0x30D: 84,
+ 0x30E: 84,
+ 0x30F: 84,
+ 0x310: 84,
+ 0x311: 84,
+ 0x312: 84,
+ 0x313: 84,
+ 0x314: 84,
+ 0x315: 84,
+ 0x316: 84,
+ 0x317: 84,
+ 0x318: 84,
+ 0x319: 84,
+ 0x31A: 84,
+ 0x31B: 84,
+ 0x31C: 84,
+ 0x31D: 84,
+ 0x31E: 84,
+ 0x31F: 84,
+ 0x320: 84,
+ 0x321: 84,
+ 0x322: 84,
+ 0x323: 84,
+ 0x324: 84,
+ 0x325: 84,
+ 0x326: 84,
+ 0x327: 84,
+ 0x328: 84,
+ 0x329: 84,
+ 0x32A: 84,
+ 0x32B: 84,
+ 0x32C: 84,
+ 0x32D: 84,
+ 0x32E: 84,
+ 0x32F: 84,
+ 0x330: 84,
+ 0x331: 84,
+ 0x332: 84,
+ 0x333: 84,
+ 0x334: 84,
+ 0x335: 84,
+ 0x336: 84,
+ 0x337: 84,
+ 0x338: 84,
+ 0x339: 84,
+ 0x33A: 84,
+ 0x33B: 84,
+ 0x33C: 84,
+ 0x33D: 84,
+ 0x33E: 84,
+ 0x33F: 84,
+ 0x340: 84,
+ 0x341: 84,
+ 0x342: 84,
+ 0x343: 84,
+ 0x344: 84,
+ 0x345: 84,
+ 0x346: 84,
+ 0x347: 84,
+ 0x348: 84,
+ 0x349: 84,
+ 0x34A: 84,
+ 0x34B: 84,
+ 0x34C: 84,
+ 0x34D: 84,
+ 0x34E: 84,
+ 0x34F: 84,
+ 0x350: 84,
+ 0x351: 84,
+ 0x352: 84,
+ 0x353: 84,
+ 0x354: 84,
+ 0x355: 84,
+ 0x356: 84,
+ 0x357: 84,
+ 0x358: 84,
+ 0x359: 84,
+ 0x35A: 84,
+ 0x35B: 84,
+ 0x35C: 84,
+ 0x35D: 84,
+ 0x35E: 84,
+ 0x35F: 84,
+ 0x360: 84,
+ 0x361: 84,
+ 0x362: 84,
+ 0x363: 84,
+ 0x364: 84,
+ 0x365: 84,
+ 0x366: 84,
+ 0x367: 84,
+ 0x368: 84,
+ 0x369: 84,
+ 0x36A: 84,
+ 0x36B: 84,
+ 0x36C: 84,
+ 0x36D: 84,
+ 0x36E: 84,
+ 0x36F: 84,
+ 0x483: 84,
+ 0x484: 84,
+ 0x485: 84,
+ 0x486: 84,
+ 0x487: 84,
+ 0x488: 84,
+ 0x489: 84,
+ 0x591: 84,
+ 0x592: 84,
+ 0x593: 84,
+ 0x594: 84,
+ 0x595: 84,
+ 0x596: 84,
+ 0x597: 84,
+ 0x598: 84,
+ 0x599: 84,
+ 0x59A: 84,
+ 0x59B: 84,
+ 0x59C: 84,
+ 0x59D: 84,
+ 0x59E: 84,
+ 0x59F: 84,
+ 0x5A0: 84,
+ 0x5A1: 84,
+ 0x5A2: 84,
+ 0x5A3: 84,
+ 0x5A4: 84,
+ 0x5A5: 84,
+ 0x5A6: 84,
+ 0x5A7: 84,
+ 0x5A8: 84,
+ 0x5A9: 84,
+ 0x5AA: 84,
+ 0x5AB: 84,
+ 0x5AC: 84,
+ 0x5AD: 84,
+ 0x5AE: 84,
+ 0x5AF: 84,
+ 0x5B0: 84,
+ 0x5B1: 84,
+ 0x5B2: 84,
+ 0x5B3: 84,
+ 0x5B4: 84,
+ 0x5B5: 84,
+ 0x5B6: 84,
+ 0x5B7: 84,
+ 0x5B8: 84,
+ 0x5B9: 84,
+ 0x5BA: 84,
+ 0x5BB: 84,
+ 0x5BC: 84,
+ 0x5BD: 84,
+ 0x5BF: 84,
+ 0x5C1: 84,
+ 0x5C2: 84,
+ 0x5C4: 84,
+ 0x5C5: 84,
+ 0x5C7: 84,
+ 0x610: 84,
+ 0x611: 84,
+ 0x612: 84,
+ 0x613: 84,
+ 0x614: 84,
+ 0x615: 84,
+ 0x616: 84,
+ 0x617: 84,
+ 0x618: 84,
+ 0x619: 84,
+ 0x61A: 84,
+ 0x61C: 84,
+ 0x620: 68,
+ 0x622: 82,
+ 0x623: 82,
+ 0x624: 82,
+ 0x625: 82,
+ 0x626: 68,
+ 0x627: 82,
+ 0x628: 68,
+ 0x629: 82,
+ 0x62A: 68,
+ 0x62B: 68,
+ 0x62C: 68,
+ 0x62D: 68,
+ 0x62E: 68,
+ 0x62F: 82,
+ 0x630: 82,
+ 0x631: 82,
+ 0x632: 82,
+ 0x633: 68,
+ 0x634: 68,
+ 0x635: 68,
+ 0x636: 68,
+ 0x637: 68,
+ 0x638: 68,
+ 0x639: 68,
+ 0x63A: 68,
+ 0x63B: 68,
+ 0x63C: 68,
+ 0x63D: 68,
+ 0x63E: 68,
+ 0x63F: 68,
+ 0x640: 67,
+ 0x641: 68,
+ 0x642: 68,
+ 0x643: 68,
+ 0x644: 68,
+ 0x645: 68,
+ 0x646: 68,
+ 0x647: 68,
+ 0x648: 82,
+ 0x649: 68,
+ 0x64A: 68,
+ 0x64B: 84,
+ 0x64C: 84,
+ 0x64D: 84,
+ 0x64E: 84,
+ 0x64F: 84,
+ 0x650: 84,
+ 0x651: 84,
+ 0x652: 84,
+ 0x653: 84,
+ 0x654: 84,
+ 0x655: 84,
+ 0x656: 84,
+ 0x657: 84,
+ 0x658: 84,
+ 0x659: 84,
+ 0x65A: 84,
+ 0x65B: 84,
+ 0x65C: 84,
+ 0x65D: 84,
+ 0x65E: 84,
+ 0x65F: 84,
+ 0x66E: 68,
+ 0x66F: 68,
+ 0x670: 84,
+ 0x671: 82,
+ 0x672: 82,
+ 0x673: 82,
+ 0x675: 82,
+ 0x676: 82,
+ 0x677: 82,
+ 0x678: 68,
+ 0x679: 68,
+ 0x67A: 68,
+ 0x67B: 68,
+ 0x67C: 68,
+ 0x67D: 68,
+ 0x67E: 68,
+ 0x67F: 68,
+ 0x680: 68,
+ 0x681: 68,
+ 0x682: 68,
+ 0x683: 68,
+ 0x684: 68,
+ 0x685: 68,
+ 0x686: 68,
+ 0x687: 68,
+ 0x688: 82,
+ 0x689: 82,
+ 0x68A: 82,
+ 0x68B: 82,
+ 0x68C: 82,
+ 0x68D: 82,
+ 0x68E: 82,
+ 0x68F: 82,
+ 0x690: 82,
+ 0x691: 82,
+ 0x692: 82,
+ 0x693: 82,
+ 0x694: 82,
+ 0x695: 82,
+ 0x696: 82,
+ 0x697: 82,
+ 0x698: 82,
+ 0x699: 82,
+ 0x69A: 68,
+ 0x69B: 68,
+ 0x69C: 68,
+ 0x69D: 68,
+ 0x69E: 68,
+ 0x69F: 68,
+ 0x6A0: 68,
+ 0x6A1: 68,
+ 0x6A2: 68,
+ 0x6A3: 68,
+ 0x6A4: 68,
+ 0x6A5: 68,
+ 0x6A6: 68,
+ 0x6A7: 68,
+ 0x6A8: 68,
+ 0x6A9: 68,
+ 0x6AA: 68,
+ 0x6AB: 68,
+ 0x6AC: 68,
+ 0x6AD: 68,
+ 0x6AE: 68,
+ 0x6AF: 68,
+ 0x6B0: 68,
+ 0x6B1: 68,
+ 0x6B2: 68,
+ 0x6B3: 68,
+ 0x6B4: 68,
+ 0x6B5: 68,
+ 0x6B6: 68,
+ 0x6B7: 68,
+ 0x6B8: 68,
+ 0x6B9: 68,
+ 0x6BA: 68,
+ 0x6BB: 68,
+ 0x6BC: 68,
+ 0x6BD: 68,
+ 0x6BE: 68,
+ 0x6BF: 68,
+ 0x6C0: 82,
+ 0x6C1: 68,
+ 0x6C2: 68,
+ 0x6C3: 82,
+ 0x6C4: 82,
+ 0x6C5: 82,
+ 0x6C6: 82,
+ 0x6C7: 82,
+ 0x6C8: 82,
+ 0x6C9: 82,
+ 0x6CA: 82,
+ 0x6CB: 82,
+ 0x6CC: 68,
+ 0x6CD: 82,
+ 0x6CE: 68,
+ 0x6CF: 82,
+ 0x6D0: 68,
+ 0x6D1: 68,
+ 0x6D2: 82,
+ 0x6D3: 82,
+ 0x6D5: 82,
+ 0x6D6: 84,
+ 0x6D7: 84,
+ 0x6D8: 84,
+ 0x6D9: 84,
+ 0x6DA: 84,
+ 0x6DB: 84,
+ 0x6DC: 84,
+ 0x6DF: 84,
+ 0x6E0: 84,
+ 0x6E1: 84,
+ 0x6E2: 84,
+ 0x6E3: 84,
+ 0x6E4: 84,
+ 0x6E7: 84,
+ 0x6E8: 84,
+ 0x6EA: 84,
+ 0x6EB: 84,
+ 0x6EC: 84,
+ 0x6ED: 84,
+ 0x6EE: 82,
+ 0x6EF: 82,
+ 0x6FA: 68,
+ 0x6FB: 68,
+ 0x6FC: 68,
+ 0x6FF: 68,
+ 0x70F: 84,
+ 0x710: 82,
+ 0x711: 84,
+ 0x712: 68,
+ 0x713: 68,
+ 0x714: 68,
+ 0x715: 82,
+ 0x716: 82,
+ 0x717: 82,
+ 0x718: 82,
+ 0x719: 82,
+ 0x71A: 68,
+ 0x71B: 68,
+ 0x71C: 68,
+ 0x71D: 68,
+ 0x71E: 82,
+ 0x71F: 68,
+ 0x720: 68,
+ 0x721: 68,
+ 0x722: 68,
+ 0x723: 68,
+ 0x724: 68,
+ 0x725: 68,
+ 0x726: 68,
+ 0x727: 68,
+ 0x728: 82,
+ 0x729: 68,
+ 0x72A: 82,
+ 0x72B: 68,
+ 0x72C: 82,
+ 0x72D: 68,
+ 0x72E: 68,
+ 0x72F: 82,
+ 0x730: 84,
+ 0x731: 84,
+ 0x732: 84,
+ 0x733: 84,
+ 0x734: 84,
+ 0x735: 84,
+ 0x736: 84,
+ 0x737: 84,
+ 0x738: 84,
+ 0x739: 84,
+ 0x73A: 84,
+ 0x73B: 84,
+ 0x73C: 84,
+ 0x73D: 84,
+ 0x73E: 84,
+ 0x73F: 84,
+ 0x740: 84,
+ 0x741: 84,
+ 0x742: 84,
+ 0x743: 84,
+ 0x744: 84,
+ 0x745: 84,
+ 0x746: 84,
+ 0x747: 84,
+ 0x748: 84,
+ 0x749: 84,
+ 0x74A: 84,
+ 0x74D: 82,
+ 0x74E: 68,
+ 0x74F: 68,
+ 0x750: 68,
+ 0x751: 68,
+ 0x752: 68,
+ 0x753: 68,
+ 0x754: 68,
+ 0x755: 68,
+ 0x756: 68,
+ 0x757: 68,
+ 0x758: 68,
+ 0x759: 82,
+ 0x75A: 82,
+ 0x75B: 82,
+ 0x75C: 68,
+ 0x75D: 68,
+ 0x75E: 68,
+ 0x75F: 68,
+ 0x760: 68,
+ 0x761: 68,
+ 0x762: 68,
+ 0x763: 68,
+ 0x764: 68,
+ 0x765: 68,
+ 0x766: 68,
+ 0x767: 68,
+ 0x768: 68,
+ 0x769: 68,
+ 0x76A: 68,
+ 0x76B: 82,
+ 0x76C: 82,
+ 0x76D: 68,
+ 0x76E: 68,
+ 0x76F: 68,
+ 0x770: 68,
+ 0x771: 82,
+ 0x772: 68,
+ 0x773: 82,
+ 0x774: 82,
+ 0x775: 68,
+ 0x776: 68,
+ 0x777: 68,
+ 0x778: 82,
+ 0x779: 82,
+ 0x77A: 68,
+ 0x77B: 68,
+ 0x77C: 68,
+ 0x77D: 68,
+ 0x77E: 68,
+ 0x77F: 68,
+ 0x7A6: 84,
+ 0x7A7: 84,
+ 0x7A8: 84,
+ 0x7A9: 84,
+ 0x7AA: 84,
+ 0x7AB: 84,
+ 0x7AC: 84,
+ 0x7AD: 84,
+ 0x7AE: 84,
+ 0x7AF: 84,
+ 0x7B0: 84,
+ 0x7CA: 68,
+ 0x7CB: 68,
+ 0x7CC: 68,
+ 0x7CD: 68,
+ 0x7CE: 68,
+ 0x7CF: 68,
+ 0x7D0: 68,
+ 0x7D1: 68,
+ 0x7D2: 68,
+ 0x7D3: 68,
+ 0x7D4: 68,
+ 0x7D5: 68,
+ 0x7D6: 68,
+ 0x7D7: 68,
+ 0x7D8: 68,
+ 0x7D9: 68,
+ 0x7DA: 68,
+ 0x7DB: 68,
+ 0x7DC: 68,
+ 0x7DD: 68,
+ 0x7DE: 68,
+ 0x7DF: 68,
+ 0x7E0: 68,
+ 0x7E1: 68,
+ 0x7E2: 68,
+ 0x7E3: 68,
+ 0x7E4: 68,
+ 0x7E5: 68,
+ 0x7E6: 68,
+ 0x7E7: 68,
+ 0x7E8: 68,
+ 0x7E9: 68,
+ 0x7EA: 68,
+ 0x7EB: 84,
+ 0x7EC: 84,
+ 0x7ED: 84,
+ 0x7EE: 84,
+ 0x7EF: 84,
+ 0x7F0: 84,
+ 0x7F1: 84,
+ 0x7F2: 84,
+ 0x7F3: 84,
+ 0x7FA: 67,
+ 0x7FD: 84,
+ 0x816: 84,
+ 0x817: 84,
+ 0x818: 84,
+ 0x819: 84,
+ 0x81B: 84,
+ 0x81C: 84,
+ 0x81D: 84,
+ 0x81E: 84,
+ 0x81F: 84,
+ 0x820: 84,
+ 0x821: 84,
+ 0x822: 84,
+ 0x823: 84,
+ 0x825: 84,
+ 0x826: 84,
+ 0x827: 84,
+ 0x829: 84,
+ 0x82A: 84,
+ 0x82B: 84,
+ 0x82C: 84,
+ 0x82D: 84,
+ 0x840: 82,
+ 0x841: 68,
+ 0x842: 68,
+ 0x843: 68,
+ 0x844: 68,
+ 0x845: 68,
+ 0x846: 82,
+ 0x847: 82,
+ 0x848: 68,
+ 0x849: 82,
+ 0x84A: 68,
+ 0x84B: 68,
+ 0x84C: 68,
+ 0x84D: 68,
+ 0x84E: 68,
+ 0x84F: 68,
+ 0x850: 68,
+ 0x851: 68,
+ 0x852: 68,
+ 0x853: 68,
+ 0x854: 82,
+ 0x855: 68,
+ 0x856: 82,
+ 0x857: 82,
+ 0x858: 82,
+ 0x859: 84,
+ 0x85A: 84,
+ 0x85B: 84,
+ 0x860: 68,
+ 0x862: 68,
+ 0x863: 68,
+ 0x864: 68,
+ 0x865: 68,
+ 0x867: 82,
+ 0x868: 68,
+ 0x869: 82,
+ 0x86A: 82,
+ 0x870: 82,
+ 0x871: 82,
+ 0x872: 82,
+ 0x873: 82,
+ 0x874: 82,
+ 0x875: 82,
+ 0x876: 82,
+ 0x877: 82,
+ 0x878: 82,
+ 0x879: 82,
+ 0x87A: 82,
+ 0x87B: 82,
+ 0x87C: 82,
+ 0x87D: 82,
+ 0x87E: 82,
+ 0x87F: 82,
+ 0x880: 82,
+ 0x881: 82,
+ 0x882: 82,
+ 0x883: 67,
+ 0x884: 67,
+ 0x885: 67,
+ 0x886: 68,
+ 0x889: 68,
+ 0x88A: 68,
+ 0x88B: 68,
+ 0x88C: 68,
+ 0x88D: 68,
+ 0x88E: 82,
+ 0x897: 84,
+ 0x898: 84,
+ 0x899: 84,
+ 0x89A: 84,
+ 0x89B: 84,
+ 0x89C: 84,
+ 0x89D: 84,
+ 0x89E: 84,
+ 0x89F: 84,
+ 0x8A0: 68,
+ 0x8A1: 68,
+ 0x8A2: 68,
+ 0x8A3: 68,
+ 0x8A4: 68,
+ 0x8A5: 68,
+ 0x8A6: 68,
+ 0x8A7: 68,
+ 0x8A8: 68,
+ 0x8A9: 68,
+ 0x8AA: 82,
+ 0x8AB: 82,
+ 0x8AC: 82,
+ 0x8AE: 82,
+ 0x8AF: 68,
+ 0x8B0: 68,
+ 0x8B1: 82,
+ 0x8B2: 82,
+ 0x8B3: 68,
+ 0x8B4: 68,
+ 0x8B5: 68,
+ 0x8B6: 68,
+ 0x8B7: 68,
+ 0x8B8: 68,
+ 0x8B9: 82,
+ 0x8BA: 68,
+ 0x8BB: 68,
+ 0x8BC: 68,
+ 0x8BD: 68,
+ 0x8BE: 68,
+ 0x8BF: 68,
+ 0x8C0: 68,
+ 0x8C1: 68,
+ 0x8C2: 68,
+ 0x8C3: 68,
+ 0x8C4: 68,
+ 0x8C5: 68,
+ 0x8C6: 68,
+ 0x8C7: 68,
+ 0x8C8: 68,
+ 0x8CA: 84,
+ 0x8CB: 84,
+ 0x8CC: 84,
+ 0x8CD: 84,
+ 0x8CE: 84,
+ 0x8CF: 84,
+ 0x8D0: 84,
+ 0x8D1: 84,
+ 0x8D2: 84,
+ 0x8D3: 84,
+ 0x8D4: 84,
+ 0x8D5: 84,
+ 0x8D6: 84,
+ 0x8D7: 84,
+ 0x8D8: 84,
+ 0x8D9: 84,
+ 0x8DA: 84,
+ 0x8DB: 84,
+ 0x8DC: 84,
+ 0x8DD: 84,
+ 0x8DE: 84,
+ 0x8DF: 84,
+ 0x8E0: 84,
+ 0x8E1: 84,
+ 0x8E3: 84,
+ 0x8E4: 84,
+ 0x8E5: 84,
+ 0x8E6: 84,
+ 0x8E7: 84,
+ 0x8E8: 84,
+ 0x8E9: 84,
+ 0x8EA: 84,
+ 0x8EB: 84,
+ 0x8EC: 84,
+ 0x8ED: 84,
+ 0x8EE: 84,
+ 0x8EF: 84,
+ 0x8F0: 84,
+ 0x8F1: 84,
+ 0x8F2: 84,
+ 0x8F3: 84,
+ 0x8F4: 84,
+ 0x8F5: 84,
+ 0x8F6: 84,
+ 0x8F7: 84,
+ 0x8F8: 84,
+ 0x8F9: 84,
+ 0x8FA: 84,
+ 0x8FB: 84,
+ 0x8FC: 84,
+ 0x8FD: 84,
+ 0x8FE: 84,
+ 0x8FF: 84,
+ 0x900: 84,
+ 0x901: 84,
+ 0x902: 84,
+ 0x93A: 84,
+ 0x93C: 84,
+ 0x941: 84,
+ 0x942: 84,
+ 0x943: 84,
+ 0x944: 84,
+ 0x945: 84,
+ 0x946: 84,
+ 0x947: 84,
+ 0x948: 84,
+ 0x94D: 84,
+ 0x951: 84,
+ 0x952: 84,
+ 0x953: 84,
+ 0x954: 84,
+ 0x955: 84,
+ 0x956: 84,
+ 0x957: 84,
+ 0x962: 84,
+ 0x963: 84,
+ 0x981: 84,
+ 0x9BC: 84,
+ 0x9C1: 84,
+ 0x9C2: 84,
+ 0x9C3: 84,
+ 0x9C4: 84,
+ 0x9CD: 84,
+ 0x9E2: 84,
+ 0x9E3: 84,
+ 0x9FE: 84,
+ 0xA01: 84,
+ 0xA02: 84,
+ 0xA3C: 84,
+ 0xA41: 84,
+ 0xA42: 84,
+ 0xA47: 84,
+ 0xA48: 84,
+ 0xA4B: 84,
+ 0xA4C: 84,
+ 0xA4D: 84,
+ 0xA51: 84,
+ 0xA70: 84,
+ 0xA71: 84,
+ 0xA75: 84,
+ 0xA81: 84,
+ 0xA82: 84,
+ 0xABC: 84,
+ 0xAC1: 84,
+ 0xAC2: 84,
+ 0xAC3: 84,
+ 0xAC4: 84,
+ 0xAC5: 84,
+ 0xAC7: 84,
+ 0xAC8: 84,
+ 0xACD: 84,
+ 0xAE2: 84,
+ 0xAE3: 84,
+ 0xAFA: 84,
+ 0xAFB: 84,
+ 0xAFC: 84,
+ 0xAFD: 84,
+ 0xAFE: 84,
+ 0xAFF: 84,
+ 0xB01: 84,
+ 0xB3C: 84,
+ 0xB3F: 84,
+ 0xB41: 84,
+ 0xB42: 84,
+ 0xB43: 84,
+ 0xB44: 84,
+ 0xB4D: 84,
+ 0xB55: 84,
+ 0xB56: 84,
+ 0xB62: 84,
+ 0xB63: 84,
+ 0xB82: 84,
+ 0xBC0: 84,
+ 0xBCD: 84,
+ 0xC00: 84,
+ 0xC04: 84,
+ 0xC3C: 84,
+ 0xC3E: 84,
+ 0xC3F: 84,
+ 0xC40: 84,
+ 0xC46: 84,
+ 0xC47: 84,
+ 0xC48: 84,
+ 0xC4A: 84,
+ 0xC4B: 84,
+ 0xC4C: 84,
+ 0xC4D: 84,
+ 0xC55: 84,
+ 0xC56: 84,
+ 0xC62: 84,
+ 0xC63: 84,
+ 0xC81: 84,
+ 0xCBC: 84,
+ 0xCBF: 84,
+ 0xCC6: 84,
+ 0xCCC: 84,
+ 0xCCD: 84,
+ 0xCE2: 84,
+ 0xCE3: 84,
+ 0xD00: 84,
+ 0xD01: 84,
+ 0xD3B: 84,
+ 0xD3C: 84,
+ 0xD41: 84,
+ 0xD42: 84,
+ 0xD43: 84,
+ 0xD44: 84,
+ 0xD4D: 84,
+ 0xD62: 84,
+ 0xD63: 84,
+ 0xD81: 84,
+ 0xDCA: 84,
+ 0xDD2: 84,
+ 0xDD3: 84,
+ 0xDD4: 84,
+ 0xDD6: 84,
+ 0xE31: 84,
+ 0xE34: 84,
+ 0xE35: 84,
+ 0xE36: 84,
+ 0xE37: 84,
+ 0xE38: 84,
+ 0xE39: 84,
+ 0xE3A: 84,
+ 0xE47: 84,
+ 0xE48: 84,
+ 0xE49: 84,
+ 0xE4A: 84,
+ 0xE4B: 84,
+ 0xE4C: 84,
+ 0xE4D: 84,
+ 0xE4E: 84,
+ 0xEB1: 84,
+ 0xEB4: 84,
+ 0xEB5: 84,
+ 0xEB6: 84,
+ 0xEB7: 84,
+ 0xEB8: 84,
+ 0xEB9: 84,
+ 0xEBA: 84,
+ 0xEBB: 84,
+ 0xEBC: 84,
+ 0xEC8: 84,
+ 0xEC9: 84,
+ 0xECA: 84,
+ 0xECB: 84,
+ 0xECC: 84,
+ 0xECD: 84,
+ 0xECE: 84,
+ 0xF18: 84,
+ 0xF19: 84,
+ 0xF35: 84,
+ 0xF37: 84,
+ 0xF39: 84,
+ 0xF71: 84,
+ 0xF72: 84,
+ 0xF73: 84,
+ 0xF74: 84,
+ 0xF75: 84,
+ 0xF76: 84,
+ 0xF77: 84,
+ 0xF78: 84,
+ 0xF79: 84,
+ 0xF7A: 84,
+ 0xF7B: 84,
+ 0xF7C: 84,
+ 0xF7D: 84,
+ 0xF7E: 84,
+ 0xF80: 84,
+ 0xF81: 84,
+ 0xF82: 84,
+ 0xF83: 84,
+ 0xF84: 84,
+ 0xF86: 84,
+ 0xF87: 84,
+ 0xF8D: 84,
+ 0xF8E: 84,
+ 0xF8F: 84,
+ 0xF90: 84,
+ 0xF91: 84,
+ 0xF92: 84,
+ 0xF93: 84,
+ 0xF94: 84,
+ 0xF95: 84,
+ 0xF96: 84,
+ 0xF97: 84,
+ 0xF99: 84,
+ 0xF9A: 84,
+ 0xF9B: 84,
+ 0xF9C: 84,
+ 0xF9D: 84,
+ 0xF9E: 84,
+ 0xF9F: 84,
+ 0xFA0: 84,
+ 0xFA1: 84,
+ 0xFA2: 84,
+ 0xFA3: 84,
+ 0xFA4: 84,
+ 0xFA5: 84,
+ 0xFA6: 84,
+ 0xFA7: 84,
+ 0xFA8: 84,
+ 0xFA9: 84,
+ 0xFAA: 84,
+ 0xFAB: 84,
+ 0xFAC: 84,
+ 0xFAD: 84,
+ 0xFAE: 84,
+ 0xFAF: 84,
+ 0xFB0: 84,
+ 0xFB1: 84,
+ 0xFB2: 84,
+ 0xFB3: 84,
+ 0xFB4: 84,
+ 0xFB5: 84,
+ 0xFB6: 84,
+ 0xFB7: 84,
+ 0xFB8: 84,
+ 0xFB9: 84,
+ 0xFBA: 84,
+ 0xFBB: 84,
+ 0xFBC: 84,
+ 0xFC6: 84,
+ 0x102D: 84,
+ 0x102E: 84,
+ 0x102F: 84,
+ 0x1030: 84,
+ 0x1032: 84,
+ 0x1033: 84,
+ 0x1034: 84,
+ 0x1035: 84,
+ 0x1036: 84,
+ 0x1037: 84,
+ 0x1039: 84,
+ 0x103A: 84,
+ 0x103D: 84,
+ 0x103E: 84,
+ 0x1058: 84,
+ 0x1059: 84,
+ 0x105E: 84,
+ 0x105F: 84,
+ 0x1060: 84,
+ 0x1071: 84,
+ 0x1072: 84,
+ 0x1073: 84,
+ 0x1074: 84,
+ 0x1082: 84,
+ 0x1085: 84,
+ 0x1086: 84,
+ 0x108D: 84,
+ 0x109D: 84,
+ 0x135D: 84,
+ 0x135E: 84,
+ 0x135F: 84,
+ 0x1712: 84,
+ 0x1713: 84,
+ 0x1714: 84,
+ 0x1732: 84,
+ 0x1733: 84,
+ 0x1752: 84,
+ 0x1753: 84,
+ 0x1772: 84,
+ 0x1773: 84,
+ 0x17B4: 84,
+ 0x17B5: 84,
+ 0x17B7: 84,
+ 0x17B8: 84,
+ 0x17B9: 84,
+ 0x17BA: 84,
+ 0x17BB: 84,
+ 0x17BC: 84,
+ 0x17BD: 84,
+ 0x17C6: 84,
+ 0x17C9: 84,
+ 0x17CA: 84,
+ 0x17CB: 84,
+ 0x17CC: 84,
+ 0x17CD: 84,
+ 0x17CE: 84,
+ 0x17CF: 84,
+ 0x17D0: 84,
+ 0x17D1: 84,
+ 0x17D2: 84,
+ 0x17D3: 84,
+ 0x17DD: 84,
+ 0x1807: 68,
+ 0x180A: 67,
+ 0x180B: 84,
+ 0x180C: 84,
+ 0x180D: 84,
+ 0x180F: 84,
+ 0x1820: 68,
+ 0x1821: 68,
+ 0x1822: 68,
+ 0x1823: 68,
+ 0x1824: 68,
+ 0x1825: 68,
+ 0x1826: 68,
+ 0x1827: 68,
+ 0x1828: 68,
+ 0x1829: 68,
+ 0x182A: 68,
+ 0x182B: 68,
+ 0x182C: 68,
+ 0x182D: 68,
+ 0x182E: 68,
+ 0x182F: 68,
+ 0x1830: 68,
+ 0x1831: 68,
+ 0x1832: 68,
+ 0x1833: 68,
+ 0x1834: 68,
+ 0x1835: 68,
+ 0x1836: 68,
+ 0x1837: 68,
+ 0x1838: 68,
+ 0x1839: 68,
+ 0x183A: 68,
+ 0x183B: 68,
+ 0x183C: 68,
+ 0x183D: 68,
+ 0x183E: 68,
+ 0x183F: 68,
+ 0x1840: 68,
+ 0x1841: 68,
+ 0x1842: 68,
+ 0x1843: 68,
+ 0x1844: 68,
+ 0x1845: 68,
+ 0x1846: 68,
+ 0x1847: 68,
+ 0x1848: 68,
+ 0x1849: 68,
+ 0x184A: 68,
+ 0x184B: 68,
+ 0x184C: 68,
+ 0x184D: 68,
+ 0x184E: 68,
+ 0x184F: 68,
+ 0x1850: 68,
+ 0x1851: 68,
+ 0x1852: 68,
+ 0x1853: 68,
+ 0x1854: 68,
+ 0x1855: 68,
+ 0x1856: 68,
+ 0x1857: 68,
+ 0x1858: 68,
+ 0x1859: 68,
+ 0x185A: 68,
+ 0x185B: 68,
+ 0x185C: 68,
+ 0x185D: 68,
+ 0x185E: 68,
+ 0x185F: 68,
+ 0x1860: 68,
+ 0x1861: 68,
+ 0x1862: 68,
+ 0x1863: 68,
+ 0x1864: 68,
+ 0x1865: 68,
+ 0x1866: 68,
+ 0x1867: 68,
+ 0x1868: 68,
+ 0x1869: 68,
+ 0x186A: 68,
+ 0x186B: 68,
+ 0x186C: 68,
+ 0x186D: 68,
+ 0x186E: 68,
+ 0x186F: 68,
+ 0x1870: 68,
+ 0x1871: 68,
+ 0x1872: 68,
+ 0x1873: 68,
+ 0x1874: 68,
+ 0x1875: 68,
+ 0x1876: 68,
+ 0x1877: 68,
+ 0x1878: 68,
+ 0x1885: 84,
+ 0x1886: 84,
+ 0x1887: 68,
+ 0x1888: 68,
+ 0x1889: 68,
+ 0x188A: 68,
+ 0x188B: 68,
+ 0x188C: 68,
+ 0x188D: 68,
+ 0x188E: 68,
+ 0x188F: 68,
+ 0x1890: 68,
+ 0x1891: 68,
+ 0x1892: 68,
+ 0x1893: 68,
+ 0x1894: 68,
+ 0x1895: 68,
+ 0x1896: 68,
+ 0x1897: 68,
+ 0x1898: 68,
+ 0x1899: 68,
+ 0x189A: 68,
+ 0x189B: 68,
+ 0x189C: 68,
+ 0x189D: 68,
+ 0x189E: 68,
+ 0x189F: 68,
+ 0x18A0: 68,
+ 0x18A1: 68,
+ 0x18A2: 68,
+ 0x18A3: 68,
+ 0x18A4: 68,
+ 0x18A5: 68,
+ 0x18A6: 68,
+ 0x18A7: 68,
+ 0x18A8: 68,
+ 0x18A9: 84,
+ 0x18AA: 68,
+ 0x1920: 84,
+ 0x1921: 84,
+ 0x1922: 84,
+ 0x1927: 84,
+ 0x1928: 84,
+ 0x1932: 84,
+ 0x1939: 84,
+ 0x193A: 84,
+ 0x193B: 84,
+ 0x1A17: 84,
+ 0x1A18: 84,
+ 0x1A1B: 84,
+ 0x1A56: 84,
+ 0x1A58: 84,
+ 0x1A59: 84,
+ 0x1A5A: 84,
+ 0x1A5B: 84,
+ 0x1A5C: 84,
+ 0x1A5D: 84,
+ 0x1A5E: 84,
+ 0x1A60: 84,
+ 0x1A62: 84,
+ 0x1A65: 84,
+ 0x1A66: 84,
+ 0x1A67: 84,
+ 0x1A68: 84,
+ 0x1A69: 84,
+ 0x1A6A: 84,
+ 0x1A6B: 84,
+ 0x1A6C: 84,
+ 0x1A73: 84,
+ 0x1A74: 84,
+ 0x1A75: 84,
+ 0x1A76: 84,
+ 0x1A77: 84,
+ 0x1A78: 84,
+ 0x1A79: 84,
+ 0x1A7A: 84,
+ 0x1A7B: 84,
+ 0x1A7C: 84,
+ 0x1A7F: 84,
+ 0x1AB0: 84,
+ 0x1AB1: 84,
+ 0x1AB2: 84,
+ 0x1AB3: 84,
+ 0x1AB4: 84,
+ 0x1AB5: 84,
+ 0x1AB6: 84,
+ 0x1AB7: 84,
+ 0x1AB8: 84,
+ 0x1AB9: 84,
+ 0x1ABA: 84,
+ 0x1ABB: 84,
+ 0x1ABC: 84,
+ 0x1ABD: 84,
+ 0x1ABE: 84,
+ 0x1ABF: 84,
+ 0x1AC0: 84,
+ 0x1AC1: 84,
+ 0x1AC2: 84,
+ 0x1AC3: 84,
+ 0x1AC4: 84,
+ 0x1AC5: 84,
+ 0x1AC6: 84,
+ 0x1AC7: 84,
+ 0x1AC8: 84,
+ 0x1AC9: 84,
+ 0x1ACA: 84,
+ 0x1ACB: 84,
+ 0x1ACC: 84,
+ 0x1ACD: 84,
+ 0x1ACE: 84,
+ 0x1B00: 84,
+ 0x1B01: 84,
+ 0x1B02: 84,
+ 0x1B03: 84,
+ 0x1B34: 84,
+ 0x1B36: 84,
+ 0x1B37: 84,
+ 0x1B38: 84,
+ 0x1B39: 84,
+ 0x1B3A: 84,
+ 0x1B3C: 84,
+ 0x1B42: 84,
+ 0x1B6B: 84,
+ 0x1B6C: 84,
+ 0x1B6D: 84,
+ 0x1B6E: 84,
+ 0x1B6F: 84,
+ 0x1B70: 84,
+ 0x1B71: 84,
+ 0x1B72: 84,
+ 0x1B73: 84,
+ 0x1B80: 84,
+ 0x1B81: 84,
+ 0x1BA2: 84,
+ 0x1BA3: 84,
+ 0x1BA4: 84,
+ 0x1BA5: 84,
+ 0x1BA8: 84,
+ 0x1BA9: 84,
+ 0x1BAB: 84,
+ 0x1BAC: 84,
+ 0x1BAD: 84,
+ 0x1BE6: 84,
+ 0x1BE8: 84,
+ 0x1BE9: 84,
+ 0x1BED: 84,
+ 0x1BEF: 84,
+ 0x1BF0: 84,
+ 0x1BF1: 84,
+ 0x1C2C: 84,
+ 0x1C2D: 84,
+ 0x1C2E: 84,
+ 0x1C2F: 84,
+ 0x1C30: 84,
+ 0x1C31: 84,
+ 0x1C32: 84,
+ 0x1C33: 84,
+ 0x1C36: 84,
+ 0x1C37: 84,
+ 0x1CD0: 84,
+ 0x1CD1: 84,
+ 0x1CD2: 84,
+ 0x1CD4: 84,
+ 0x1CD5: 84,
+ 0x1CD6: 84,
+ 0x1CD7: 84,
+ 0x1CD8: 84,
+ 0x1CD9: 84,
+ 0x1CDA: 84,
+ 0x1CDB: 84,
+ 0x1CDC: 84,
+ 0x1CDD: 84,
+ 0x1CDE: 84,
+ 0x1CDF: 84,
+ 0x1CE0: 84,
+ 0x1CE2: 84,
+ 0x1CE3: 84,
+ 0x1CE4: 84,
+ 0x1CE5: 84,
+ 0x1CE6: 84,
+ 0x1CE7: 84,
+ 0x1CE8: 84,
+ 0x1CED: 84,
+ 0x1CF4: 84,
+ 0x1CF8: 84,
+ 0x1CF9: 84,
+ 0x1DC0: 84,
+ 0x1DC1: 84,
+ 0x1DC2: 84,
+ 0x1DC3: 84,
+ 0x1DC4: 84,
+ 0x1DC5: 84,
+ 0x1DC6: 84,
+ 0x1DC7: 84,
+ 0x1DC8: 84,
+ 0x1DC9: 84,
+ 0x1DCA: 84,
+ 0x1DCB: 84,
+ 0x1DCC: 84,
+ 0x1DCD: 84,
+ 0x1DCE: 84,
+ 0x1DCF: 84,
+ 0x1DD0: 84,
+ 0x1DD1: 84,
+ 0x1DD2: 84,
+ 0x1DD3: 84,
+ 0x1DD4: 84,
+ 0x1DD5: 84,
+ 0x1DD6: 84,
+ 0x1DD7: 84,
+ 0x1DD8: 84,
+ 0x1DD9: 84,
+ 0x1DDA: 84,
+ 0x1DDB: 84,
+ 0x1DDC: 84,
+ 0x1DDD: 84,
+ 0x1DDE: 84,
+ 0x1DDF: 84,
+ 0x1DE0: 84,
+ 0x1DE1: 84,
+ 0x1DE2: 84,
+ 0x1DE3: 84,
+ 0x1DE4: 84,
+ 0x1DE5: 84,
+ 0x1DE6: 84,
+ 0x1DE7: 84,
+ 0x1DE8: 84,
+ 0x1DE9: 84,
+ 0x1DEA: 84,
+ 0x1DEB: 84,
+ 0x1DEC: 84,
+ 0x1DED: 84,
+ 0x1DEE: 84,
+ 0x1DEF: 84,
+ 0x1DF0: 84,
+ 0x1DF1: 84,
+ 0x1DF2: 84,
+ 0x1DF3: 84,
+ 0x1DF4: 84,
+ 0x1DF5: 84,
+ 0x1DF6: 84,
+ 0x1DF7: 84,
+ 0x1DF8: 84,
+ 0x1DF9: 84,
+ 0x1DFA: 84,
+ 0x1DFB: 84,
+ 0x1DFC: 84,
+ 0x1DFD: 84,
+ 0x1DFE: 84,
+ 0x1DFF: 84,
+ 0x200B: 84,
+ 0x200D: 67,
+ 0x200E: 84,
+ 0x200F: 84,
+ 0x202A: 84,
+ 0x202B: 84,
+ 0x202C: 84,
+ 0x202D: 84,
+ 0x202E: 84,
+ 0x2060: 84,
+ 0x2061: 84,
+ 0x2062: 84,
+ 0x2063: 84,
+ 0x2064: 84,
+ 0x206A: 84,
+ 0x206B: 84,
+ 0x206C: 84,
+ 0x206D: 84,
+ 0x206E: 84,
+ 0x206F: 84,
+ 0x20D0: 84,
+ 0x20D1: 84,
+ 0x20D2: 84,
+ 0x20D3: 84,
+ 0x20D4: 84,
+ 0x20D5: 84,
+ 0x20D6: 84,
+ 0x20D7: 84,
+ 0x20D8: 84,
+ 0x20D9: 84,
+ 0x20DA: 84,
+ 0x20DB: 84,
+ 0x20DC: 84,
+ 0x20DD: 84,
+ 0x20DE: 84,
+ 0x20DF: 84,
+ 0x20E0: 84,
+ 0x20E1: 84,
+ 0x20E2: 84,
+ 0x20E3: 84,
+ 0x20E4: 84,
+ 0x20E5: 84,
+ 0x20E6: 84,
+ 0x20E7: 84,
+ 0x20E8: 84,
+ 0x20E9: 84,
+ 0x20EA: 84,
+ 0x20EB: 84,
+ 0x20EC: 84,
+ 0x20ED: 84,
+ 0x20EE: 84,
+ 0x20EF: 84,
+ 0x20F0: 84,
+ 0x2CEF: 84,
+ 0x2CF0: 84,
+ 0x2CF1: 84,
+ 0x2D7F: 84,
+ 0x2DE0: 84,
+ 0x2DE1: 84,
+ 0x2DE2: 84,
+ 0x2DE3: 84,
+ 0x2DE4: 84,
+ 0x2DE5: 84,
+ 0x2DE6: 84,
+ 0x2DE7: 84,
+ 0x2DE8: 84,
+ 0x2DE9: 84,
+ 0x2DEA: 84,
+ 0x2DEB: 84,
+ 0x2DEC: 84,
+ 0x2DED: 84,
+ 0x2DEE: 84,
+ 0x2DEF: 84,
+ 0x2DF0: 84,
+ 0x2DF1: 84,
+ 0x2DF2: 84,
+ 0x2DF3: 84,
+ 0x2DF4: 84,
+ 0x2DF5: 84,
+ 0x2DF6: 84,
+ 0x2DF7: 84,
+ 0x2DF8: 84,
+ 0x2DF9: 84,
+ 0x2DFA: 84,
+ 0x2DFB: 84,
+ 0x2DFC: 84,
+ 0x2DFD: 84,
+ 0x2DFE: 84,
+ 0x2DFF: 84,
+ 0x302A: 84,
+ 0x302B: 84,
+ 0x302C: 84,
+ 0x302D: 84,
+ 0x3099: 84,
+ 0x309A: 84,
+ 0xA66F: 84,
+ 0xA670: 84,
+ 0xA671: 84,
+ 0xA672: 84,
+ 0xA674: 84,
+ 0xA675: 84,
+ 0xA676: 84,
+ 0xA677: 84,
+ 0xA678: 84,
+ 0xA679: 84,
+ 0xA67A: 84,
+ 0xA67B: 84,
+ 0xA67C: 84,
+ 0xA67D: 84,
+ 0xA69E: 84,
+ 0xA69F: 84,
+ 0xA6F0: 84,
+ 0xA6F1: 84,
+ 0xA802: 84,
+ 0xA806: 84,
+ 0xA80B: 84,
+ 0xA825: 84,
+ 0xA826: 84,
+ 0xA82C: 84,
+ 0xA840: 68,
+ 0xA841: 68,
+ 0xA842: 68,
+ 0xA843: 68,
+ 0xA844: 68,
+ 0xA845: 68,
+ 0xA846: 68,
+ 0xA847: 68,
+ 0xA848: 68,
+ 0xA849: 68,
+ 0xA84A: 68,
+ 0xA84B: 68,
+ 0xA84C: 68,
+ 0xA84D: 68,
+ 0xA84E: 68,
+ 0xA84F: 68,
+ 0xA850: 68,
+ 0xA851: 68,
+ 0xA852: 68,
+ 0xA853: 68,
+ 0xA854: 68,
+ 0xA855: 68,
+ 0xA856: 68,
+ 0xA857: 68,
+ 0xA858: 68,
+ 0xA859: 68,
+ 0xA85A: 68,
+ 0xA85B: 68,
+ 0xA85C: 68,
+ 0xA85D: 68,
+ 0xA85E: 68,
+ 0xA85F: 68,
+ 0xA860: 68,
+ 0xA861: 68,
+ 0xA862: 68,
+ 0xA863: 68,
+ 0xA864: 68,
+ 0xA865: 68,
+ 0xA866: 68,
+ 0xA867: 68,
+ 0xA868: 68,
+ 0xA869: 68,
+ 0xA86A: 68,
+ 0xA86B: 68,
+ 0xA86C: 68,
+ 0xA86D: 68,
+ 0xA86E: 68,
+ 0xA86F: 68,
+ 0xA870: 68,
+ 0xA871: 68,
+ 0xA872: 76,
+ 0xA8C4: 84,
+ 0xA8C5: 84,
+ 0xA8E0: 84,
+ 0xA8E1: 84,
+ 0xA8E2: 84,
+ 0xA8E3: 84,
+ 0xA8E4: 84,
+ 0xA8E5: 84,
+ 0xA8E6: 84,
+ 0xA8E7: 84,
+ 0xA8E8: 84,
+ 0xA8E9: 84,
+ 0xA8EA: 84,
+ 0xA8EB: 84,
+ 0xA8EC: 84,
+ 0xA8ED: 84,
+ 0xA8EE: 84,
+ 0xA8EF: 84,
+ 0xA8F0: 84,
+ 0xA8F1: 84,
+ 0xA8FF: 84,
+ 0xA926: 84,
+ 0xA927: 84,
+ 0xA928: 84,
+ 0xA929: 84,
+ 0xA92A: 84,
+ 0xA92B: 84,
+ 0xA92C: 84,
+ 0xA92D: 84,
+ 0xA947: 84,
+ 0xA948: 84,
+ 0xA949: 84,
+ 0xA94A: 84,
+ 0xA94B: 84,
+ 0xA94C: 84,
+ 0xA94D: 84,
+ 0xA94E: 84,
+ 0xA94F: 84,
+ 0xA950: 84,
+ 0xA951: 84,
+ 0xA980: 84,
+ 0xA981: 84,
+ 0xA982: 84,
+ 0xA9B3: 84,
+ 0xA9B6: 84,
+ 0xA9B7: 84,
+ 0xA9B8: 84,
+ 0xA9B9: 84,
+ 0xA9BC: 84,
+ 0xA9BD: 84,
+ 0xA9E5: 84,
+ 0xAA29: 84,
+ 0xAA2A: 84,
+ 0xAA2B: 84,
+ 0xAA2C: 84,
+ 0xAA2D: 84,
+ 0xAA2E: 84,
+ 0xAA31: 84,
+ 0xAA32: 84,
+ 0xAA35: 84,
+ 0xAA36: 84,
+ 0xAA43: 84,
+ 0xAA4C: 84,
+ 0xAA7C: 84,
+ 0xAAB0: 84,
+ 0xAAB2: 84,
+ 0xAAB3: 84,
+ 0xAAB4: 84,
+ 0xAAB7: 84,
+ 0xAAB8: 84,
+ 0xAABE: 84,
+ 0xAABF: 84,
+ 0xAAC1: 84,
+ 0xAAEC: 84,
+ 0xAAED: 84,
+ 0xAAF6: 84,
+ 0xABE5: 84,
+ 0xABE8: 84,
+ 0xABED: 84,
+ 0xFB1E: 84,
+ 0xFE00: 84,
+ 0xFE01: 84,
+ 0xFE02: 84,
+ 0xFE03: 84,
+ 0xFE04: 84,
+ 0xFE05: 84,
+ 0xFE06: 84,
+ 0xFE07: 84,
+ 0xFE08: 84,
+ 0xFE09: 84,
+ 0xFE0A: 84,
+ 0xFE0B: 84,
+ 0xFE0C: 84,
+ 0xFE0D: 84,
+ 0xFE0E: 84,
+ 0xFE0F: 84,
+ 0xFE20: 84,
+ 0xFE21: 84,
+ 0xFE22: 84,
+ 0xFE23: 84,
+ 0xFE24: 84,
+ 0xFE25: 84,
+ 0xFE26: 84,
+ 0xFE27: 84,
+ 0xFE28: 84,
+ 0xFE29: 84,
+ 0xFE2A: 84,
+ 0xFE2B: 84,
+ 0xFE2C: 84,
+ 0xFE2D: 84,
+ 0xFE2E: 84,
+ 0xFE2F: 84,
+ 0xFEFF: 84,
+ 0xFFF9: 84,
+ 0xFFFA: 84,
+ 0xFFFB: 84,
+ 0x101FD: 84,
+ 0x102E0: 84,
+ 0x10376: 84,
+ 0x10377: 84,
+ 0x10378: 84,
+ 0x10379: 84,
+ 0x1037A: 84,
+ 0x10A01: 84,
+ 0x10A02: 84,
+ 0x10A03: 84,
+ 0x10A05: 84,
+ 0x10A06: 84,
+ 0x10A0C: 84,
+ 0x10A0D: 84,
+ 0x10A0E: 84,
+ 0x10A0F: 84,
+ 0x10A38: 84,
+ 0x10A39: 84,
+ 0x10A3A: 84,
+ 0x10A3F: 84,
+ 0x10AC0: 68,
+ 0x10AC1: 68,
+ 0x10AC2: 68,
+ 0x10AC3: 68,
+ 0x10AC4: 68,
+ 0x10AC5: 82,
+ 0x10AC7: 82,
+ 0x10AC9: 82,
+ 0x10ACA: 82,
+ 0x10ACD: 76,
+ 0x10ACE: 82,
+ 0x10ACF: 82,
+ 0x10AD0: 82,
+ 0x10AD1: 82,
+ 0x10AD2: 82,
+ 0x10AD3: 68,
+ 0x10AD4: 68,
+ 0x10AD5: 68,
+ 0x10AD6: 68,
+ 0x10AD7: 76,
+ 0x10AD8: 68,
+ 0x10AD9: 68,
+ 0x10ADA: 68,
+ 0x10ADB: 68,
+ 0x10ADC: 68,
+ 0x10ADD: 82,
+ 0x10ADE: 68,
+ 0x10ADF: 68,
+ 0x10AE0: 68,
+ 0x10AE1: 82,
+ 0x10AE4: 82,
+ 0x10AE5: 84,
+ 0x10AE6: 84,
+ 0x10AEB: 68,
+ 0x10AEC: 68,
+ 0x10AED: 68,
+ 0x10AEE: 68,
+ 0x10AEF: 82,
+ 0x10B80: 68,
+ 0x10B81: 82,
+ 0x10B82: 68,
+ 0x10B83: 82,
+ 0x10B84: 82,
+ 0x10B85: 82,
+ 0x10B86: 68,
+ 0x10B87: 68,
+ 0x10B88: 68,
+ 0x10B89: 82,
+ 0x10B8A: 68,
+ 0x10B8B: 68,
+ 0x10B8C: 82,
+ 0x10B8D: 68,
+ 0x10B8E: 82,
+ 0x10B8F: 82,
+ 0x10B90: 68,
+ 0x10B91: 82,
+ 0x10BA9: 82,
+ 0x10BAA: 82,
+ 0x10BAB: 82,
+ 0x10BAC: 82,
+ 0x10BAD: 68,
+ 0x10BAE: 68,
+ 0x10D00: 76,
+ 0x10D01: 68,
+ 0x10D02: 68,
+ 0x10D03: 68,
+ 0x10D04: 68,
+ 0x10D05: 68,
+ 0x10D06: 68,
+ 0x10D07: 68,
+ 0x10D08: 68,
+ 0x10D09: 68,
+ 0x10D0A: 68,
+ 0x10D0B: 68,
+ 0x10D0C: 68,
+ 0x10D0D: 68,
+ 0x10D0E: 68,
+ 0x10D0F: 68,
+ 0x10D10: 68,
+ 0x10D11: 68,
+ 0x10D12: 68,
+ 0x10D13: 68,
+ 0x10D14: 68,
+ 0x10D15: 68,
+ 0x10D16: 68,
+ 0x10D17: 68,
+ 0x10D18: 68,
+ 0x10D19: 68,
+ 0x10D1A: 68,
+ 0x10D1B: 68,
+ 0x10D1C: 68,
+ 0x10D1D: 68,
+ 0x10D1E: 68,
+ 0x10D1F: 68,
+ 0x10D20: 68,
+ 0x10D21: 68,
+ 0x10D22: 82,
+ 0x10D23: 68,
+ 0x10D24: 84,
+ 0x10D25: 84,
+ 0x10D26: 84,
+ 0x10D27: 84,
+ 0x10D69: 84,
+ 0x10D6A: 84,
+ 0x10D6B: 84,
+ 0x10D6C: 84,
+ 0x10D6D: 84,
+ 0x10EAB: 84,
+ 0x10EAC: 84,
+ 0x10EC2: 82,
+ 0x10EC3: 68,
+ 0x10EC4: 68,
+ 0x10EFC: 84,
+ 0x10EFD: 84,
+ 0x10EFE: 84,
+ 0x10EFF: 84,
+ 0x10F30: 68,
+ 0x10F31: 68,
+ 0x10F32: 68,
+ 0x10F33: 82,
+ 0x10F34: 68,
+ 0x10F35: 68,
+ 0x10F36: 68,
+ 0x10F37: 68,
+ 0x10F38: 68,
+ 0x10F39: 68,
+ 0x10F3A: 68,
+ 0x10F3B: 68,
+ 0x10F3C: 68,
+ 0x10F3D: 68,
+ 0x10F3E: 68,
+ 0x10F3F: 68,
+ 0x10F40: 68,
+ 0x10F41: 68,
+ 0x10F42: 68,
+ 0x10F43: 68,
+ 0x10F44: 68,
+ 0x10F46: 84,
+ 0x10F47: 84,
+ 0x10F48: 84,
+ 0x10F49: 84,
+ 0x10F4A: 84,
+ 0x10F4B: 84,
+ 0x10F4C: 84,
+ 0x10F4D: 84,
+ 0x10F4E: 84,
+ 0x10F4F: 84,
+ 0x10F50: 84,
+ 0x10F51: 68,
+ 0x10F52: 68,
+ 0x10F53: 68,
+ 0x10F54: 82,
+ 0x10F70: 68,
+ 0x10F71: 68,
+ 0x10F72: 68,
+ 0x10F73: 68,
+ 0x10F74: 82,
+ 0x10F75: 82,
+ 0x10F76: 68,
+ 0x10F77: 68,
+ 0x10F78: 68,
+ 0x10F79: 68,
+ 0x10F7A: 68,
+ 0x10F7B: 68,
+ 0x10F7C: 68,
+ 0x10F7D: 68,
+ 0x10F7E: 68,
+ 0x10F7F: 68,
+ 0x10F80: 68,
+ 0x10F81: 68,
+ 0x10F82: 84,
+ 0x10F83: 84,
+ 0x10F84: 84,
+ 0x10F85: 84,
+ 0x10FB0: 68,
+ 0x10FB2: 68,
+ 0x10FB3: 68,
+ 0x10FB4: 82,
+ 0x10FB5: 82,
+ 0x10FB6: 82,
+ 0x10FB8: 68,
+ 0x10FB9: 82,
+ 0x10FBA: 82,
+ 0x10FBB: 68,
+ 0x10FBC: 68,
+ 0x10FBD: 82,
+ 0x10FBE: 68,
+ 0x10FBF: 68,
+ 0x10FC1: 68,
+ 0x10FC2: 82,
+ 0x10FC3: 82,
+ 0x10FC4: 68,
+ 0x10FC9: 82,
+ 0x10FCA: 68,
+ 0x10FCB: 76,
+ 0x11001: 84,
+ 0x11038: 84,
+ 0x11039: 84,
+ 0x1103A: 84,
+ 0x1103B: 84,
+ 0x1103C: 84,
+ 0x1103D: 84,
+ 0x1103E: 84,
+ 0x1103F: 84,
+ 0x11040: 84,
+ 0x11041: 84,
+ 0x11042: 84,
+ 0x11043: 84,
+ 0x11044: 84,
+ 0x11045: 84,
+ 0x11046: 84,
+ 0x11070: 84,
+ 0x11073: 84,
+ 0x11074: 84,
+ 0x1107F: 84,
+ 0x11080: 84,
+ 0x11081: 84,
+ 0x110B3: 84,
+ 0x110B4: 84,
+ 0x110B5: 84,
+ 0x110B6: 84,
+ 0x110B9: 84,
+ 0x110BA: 84,
+ 0x110C2: 84,
+ 0x11100: 84,
+ 0x11101: 84,
+ 0x11102: 84,
+ 0x11127: 84,
+ 0x11128: 84,
+ 0x11129: 84,
+ 0x1112A: 84,
+ 0x1112B: 84,
+ 0x1112D: 84,
+ 0x1112E: 84,
+ 0x1112F: 84,
+ 0x11130: 84,
+ 0x11131: 84,
+ 0x11132: 84,
+ 0x11133: 84,
+ 0x11134: 84,
+ 0x11173: 84,
+ 0x11180: 84,
+ 0x11181: 84,
+ 0x111B6: 84,
+ 0x111B7: 84,
+ 0x111B8: 84,
+ 0x111B9: 84,
+ 0x111BA: 84,
+ 0x111BB: 84,
+ 0x111BC: 84,
+ 0x111BD: 84,
+ 0x111BE: 84,
+ 0x111C9: 84,
+ 0x111CA: 84,
+ 0x111CB: 84,
+ 0x111CC: 84,
+ 0x111CF: 84,
+ 0x1122F: 84,
+ 0x11230: 84,
+ 0x11231: 84,
+ 0x11234: 84,
+ 0x11236: 84,
+ 0x11237: 84,
+ 0x1123E: 84,
+ 0x11241: 84,
+ 0x112DF: 84,
+ 0x112E3: 84,
+ 0x112E4: 84,
+ 0x112E5: 84,
+ 0x112E6: 84,
+ 0x112E7: 84,
+ 0x112E8: 84,
+ 0x112E9: 84,
+ 0x112EA: 84,
+ 0x11300: 84,
+ 0x11301: 84,
+ 0x1133B: 84,
+ 0x1133C: 84,
+ 0x11340: 84,
+ 0x11366: 84,
+ 0x11367: 84,
+ 0x11368: 84,
+ 0x11369: 84,
+ 0x1136A: 84,
+ 0x1136B: 84,
+ 0x1136C: 84,
+ 0x11370: 84,
+ 0x11371: 84,
+ 0x11372: 84,
+ 0x11373: 84,
+ 0x11374: 84,
+ 0x113BB: 84,
+ 0x113BC: 84,
+ 0x113BD: 84,
+ 0x113BE: 84,
+ 0x113BF: 84,
+ 0x113C0: 84,
+ 0x113CE: 84,
+ 0x113D0: 84,
+ 0x113D2: 84,
+ 0x113E1: 84,
+ 0x113E2: 84,
+ 0x11438: 84,
+ 0x11439: 84,
+ 0x1143A: 84,
+ 0x1143B: 84,
+ 0x1143C: 84,
+ 0x1143D: 84,
+ 0x1143E: 84,
+ 0x1143F: 84,
+ 0x11442: 84,
+ 0x11443: 84,
+ 0x11444: 84,
+ 0x11446: 84,
+ 0x1145E: 84,
+ 0x114B3: 84,
+ 0x114B4: 84,
+ 0x114B5: 84,
+ 0x114B6: 84,
+ 0x114B7: 84,
+ 0x114B8: 84,
+ 0x114BA: 84,
+ 0x114BF: 84,
+ 0x114C0: 84,
+ 0x114C2: 84,
+ 0x114C3: 84,
+ 0x115B2: 84,
+ 0x115B3: 84,
+ 0x115B4: 84,
+ 0x115B5: 84,
+ 0x115BC: 84,
+ 0x115BD: 84,
+ 0x115BF: 84,
+ 0x115C0: 84,
+ 0x115DC: 84,
+ 0x115DD: 84,
+ 0x11633: 84,
+ 0x11634: 84,
+ 0x11635: 84,
+ 0x11636: 84,
+ 0x11637: 84,
+ 0x11638: 84,
+ 0x11639: 84,
+ 0x1163A: 84,
+ 0x1163D: 84,
+ 0x1163F: 84,
+ 0x11640: 84,
+ 0x116AB: 84,
+ 0x116AD: 84,
+ 0x116B0: 84,
+ 0x116B1: 84,
+ 0x116B2: 84,
+ 0x116B3: 84,
+ 0x116B4: 84,
+ 0x116B5: 84,
+ 0x116B7: 84,
+ 0x1171D: 84,
+ 0x1171F: 84,
+ 0x11722: 84,
+ 0x11723: 84,
+ 0x11724: 84,
+ 0x11725: 84,
+ 0x11727: 84,
+ 0x11728: 84,
+ 0x11729: 84,
+ 0x1172A: 84,
+ 0x1172B: 84,
+ 0x1182F: 84,
+ 0x11830: 84,
+ 0x11831: 84,
+ 0x11832: 84,
+ 0x11833: 84,
+ 0x11834: 84,
+ 0x11835: 84,
+ 0x11836: 84,
+ 0x11837: 84,
+ 0x11839: 84,
+ 0x1183A: 84,
+ 0x1193B: 84,
+ 0x1193C: 84,
+ 0x1193E: 84,
+ 0x11943: 84,
+ 0x119D4: 84,
+ 0x119D5: 84,
+ 0x119D6: 84,
+ 0x119D7: 84,
+ 0x119DA: 84,
+ 0x119DB: 84,
+ 0x119E0: 84,
+ 0x11A01: 84,
+ 0x11A02: 84,
+ 0x11A03: 84,
+ 0x11A04: 84,
+ 0x11A05: 84,
+ 0x11A06: 84,
+ 0x11A07: 84,
+ 0x11A08: 84,
+ 0x11A09: 84,
+ 0x11A0A: 84,
+ 0x11A33: 84,
+ 0x11A34: 84,
+ 0x11A35: 84,
+ 0x11A36: 84,
+ 0x11A37: 84,
+ 0x11A38: 84,
+ 0x11A3B: 84,
+ 0x11A3C: 84,
+ 0x11A3D: 84,
+ 0x11A3E: 84,
+ 0x11A47: 84,
+ 0x11A51: 84,
+ 0x11A52: 84,
+ 0x11A53: 84,
+ 0x11A54: 84,
+ 0x11A55: 84,
+ 0x11A56: 84,
+ 0x11A59: 84,
+ 0x11A5A: 84,
+ 0x11A5B: 84,
+ 0x11A8A: 84,
+ 0x11A8B: 84,
+ 0x11A8C: 84,
+ 0x11A8D: 84,
+ 0x11A8E: 84,
+ 0x11A8F: 84,
+ 0x11A90: 84,
+ 0x11A91: 84,
+ 0x11A92: 84,
+ 0x11A93: 84,
+ 0x11A94: 84,
+ 0x11A95: 84,
+ 0x11A96: 84,
+ 0x11A98: 84,
+ 0x11A99: 84,
+ 0x11C30: 84,
+ 0x11C31: 84,
+ 0x11C32: 84,
+ 0x11C33: 84,
+ 0x11C34: 84,
+ 0x11C35: 84,
+ 0x11C36: 84,
+ 0x11C38: 84,
+ 0x11C39: 84,
+ 0x11C3A: 84,
+ 0x11C3B: 84,
+ 0x11C3C: 84,
+ 0x11C3D: 84,
+ 0x11C3F: 84,
+ 0x11C92: 84,
+ 0x11C93: 84,
+ 0x11C94: 84,
+ 0x11C95: 84,
+ 0x11C96: 84,
+ 0x11C97: 84,
+ 0x11C98: 84,
+ 0x11C99: 84,
+ 0x11C9A: 84,
+ 0x11C9B: 84,
+ 0x11C9C: 84,
+ 0x11C9D: 84,
+ 0x11C9E: 84,
+ 0x11C9F: 84,
+ 0x11CA0: 84,
+ 0x11CA1: 84,
+ 0x11CA2: 84,
+ 0x11CA3: 84,
+ 0x11CA4: 84,
+ 0x11CA5: 84,
+ 0x11CA6: 84,
+ 0x11CA7: 84,
+ 0x11CAA: 84,
+ 0x11CAB: 84,
+ 0x11CAC: 84,
+ 0x11CAD: 84,
+ 0x11CAE: 84,
+ 0x11CAF: 84,
+ 0x11CB0: 84,
+ 0x11CB2: 84,
+ 0x11CB3: 84,
+ 0x11CB5: 84,
+ 0x11CB6: 84,
+ 0x11D31: 84,
+ 0x11D32: 84,
+ 0x11D33: 84,
+ 0x11D34: 84,
+ 0x11D35: 84,
+ 0x11D36: 84,
+ 0x11D3A: 84,
+ 0x11D3C: 84,
+ 0x11D3D: 84,
+ 0x11D3F: 84,
+ 0x11D40: 84,
+ 0x11D41: 84,
+ 0x11D42: 84,
+ 0x11D43: 84,
+ 0x11D44: 84,
+ 0x11D45: 84,
+ 0x11D47: 84,
+ 0x11D90: 84,
+ 0x11D91: 84,
+ 0x11D95: 84,
+ 0x11D97: 84,
+ 0x11EF3: 84,
+ 0x11EF4: 84,
+ 0x11F00: 84,
+ 0x11F01: 84,
+ 0x11F36: 84,
+ 0x11F37: 84,
+ 0x11F38: 84,
+ 0x11F39: 84,
+ 0x11F3A: 84,
+ 0x11F40: 84,
+ 0x11F42: 84,
+ 0x11F5A: 84,
+ 0x13430: 84,
+ 0x13431: 84,
+ 0x13432: 84,
+ 0x13433: 84,
+ 0x13434: 84,
+ 0x13435: 84,
+ 0x13436: 84,
+ 0x13437: 84,
+ 0x13438: 84,
+ 0x13439: 84,
+ 0x1343A: 84,
+ 0x1343B: 84,
+ 0x1343C: 84,
+ 0x1343D: 84,
+ 0x1343E: 84,
+ 0x1343F: 84,
+ 0x13440: 84,
+ 0x13447: 84,
+ 0x13448: 84,
+ 0x13449: 84,
+ 0x1344A: 84,
+ 0x1344B: 84,
+ 0x1344C: 84,
+ 0x1344D: 84,
+ 0x1344E: 84,
+ 0x1344F: 84,
+ 0x13450: 84,
+ 0x13451: 84,
+ 0x13452: 84,
+ 0x13453: 84,
+ 0x13454: 84,
+ 0x13455: 84,
+ 0x1611E: 84,
+ 0x1611F: 84,
+ 0x16120: 84,
+ 0x16121: 84,
+ 0x16122: 84,
+ 0x16123: 84,
+ 0x16124: 84,
+ 0x16125: 84,
+ 0x16126: 84,
+ 0x16127: 84,
+ 0x16128: 84,
+ 0x16129: 84,
+ 0x1612D: 84,
+ 0x1612E: 84,
+ 0x1612F: 84,
+ 0x16AF0: 84,
+ 0x16AF1: 84,
+ 0x16AF2: 84,
+ 0x16AF3: 84,
+ 0x16AF4: 84,
+ 0x16B30: 84,
+ 0x16B31: 84,
+ 0x16B32: 84,
+ 0x16B33: 84,
+ 0x16B34: 84,
+ 0x16B35: 84,
+ 0x16B36: 84,
+ 0x16F4F: 84,
+ 0x16F8F: 84,
+ 0x16F90: 84,
+ 0x16F91: 84,
+ 0x16F92: 84,
+ 0x16FE4: 84,
+ 0x1BC9D: 84,
+ 0x1BC9E: 84,
+ 0x1BCA0: 84,
+ 0x1BCA1: 84,
+ 0x1BCA2: 84,
+ 0x1BCA3: 84,
+ 0x1CF00: 84,
+ 0x1CF01: 84,
+ 0x1CF02: 84,
+ 0x1CF03: 84,
+ 0x1CF04: 84,
+ 0x1CF05: 84,
+ 0x1CF06: 84,
+ 0x1CF07: 84,
+ 0x1CF08: 84,
+ 0x1CF09: 84,
+ 0x1CF0A: 84,
+ 0x1CF0B: 84,
+ 0x1CF0C: 84,
+ 0x1CF0D: 84,
+ 0x1CF0E: 84,
+ 0x1CF0F: 84,
+ 0x1CF10: 84,
+ 0x1CF11: 84,
+ 0x1CF12: 84,
+ 0x1CF13: 84,
+ 0x1CF14: 84,
+ 0x1CF15: 84,
+ 0x1CF16: 84,
+ 0x1CF17: 84,
+ 0x1CF18: 84,
+ 0x1CF19: 84,
+ 0x1CF1A: 84,
+ 0x1CF1B: 84,
+ 0x1CF1C: 84,
+ 0x1CF1D: 84,
+ 0x1CF1E: 84,
+ 0x1CF1F: 84,
+ 0x1CF20: 84,
+ 0x1CF21: 84,
+ 0x1CF22: 84,
+ 0x1CF23: 84,
+ 0x1CF24: 84,
+ 0x1CF25: 84,
+ 0x1CF26: 84,
+ 0x1CF27: 84,
+ 0x1CF28: 84,
+ 0x1CF29: 84,
+ 0x1CF2A: 84,
+ 0x1CF2B: 84,
+ 0x1CF2C: 84,
+ 0x1CF2D: 84,
+ 0x1CF30: 84,
+ 0x1CF31: 84,
+ 0x1CF32: 84,
+ 0x1CF33: 84,
+ 0x1CF34: 84,
+ 0x1CF35: 84,
+ 0x1CF36: 84,
+ 0x1CF37: 84,
+ 0x1CF38: 84,
+ 0x1CF39: 84,
+ 0x1CF3A: 84,
+ 0x1CF3B: 84,
+ 0x1CF3C: 84,
+ 0x1CF3D: 84,
+ 0x1CF3E: 84,
+ 0x1CF3F: 84,
+ 0x1CF40: 84,
+ 0x1CF41: 84,
+ 0x1CF42: 84,
+ 0x1CF43: 84,
+ 0x1CF44: 84,
+ 0x1CF45: 84,
+ 0x1CF46: 84,
+ 0x1D167: 84,
+ 0x1D168: 84,
+ 0x1D169: 84,
+ 0x1D173: 84,
+ 0x1D174: 84,
+ 0x1D175: 84,
+ 0x1D176: 84,
+ 0x1D177: 84,
+ 0x1D178: 84,
+ 0x1D179: 84,
+ 0x1D17A: 84,
+ 0x1D17B: 84,
+ 0x1D17C: 84,
+ 0x1D17D: 84,
+ 0x1D17E: 84,
+ 0x1D17F: 84,
+ 0x1D180: 84,
+ 0x1D181: 84,
+ 0x1D182: 84,
+ 0x1D185: 84,
+ 0x1D186: 84,
+ 0x1D187: 84,
+ 0x1D188: 84,
+ 0x1D189: 84,
+ 0x1D18A: 84,
+ 0x1D18B: 84,
+ 0x1D1AA: 84,
+ 0x1D1AB: 84,
+ 0x1D1AC: 84,
+ 0x1D1AD: 84,
+ 0x1D242: 84,
+ 0x1D243: 84,
+ 0x1D244: 84,
+ 0x1DA00: 84,
+ 0x1DA01: 84,
+ 0x1DA02: 84,
+ 0x1DA03: 84,
+ 0x1DA04: 84,
+ 0x1DA05: 84,
+ 0x1DA06: 84,
+ 0x1DA07: 84,
+ 0x1DA08: 84,
+ 0x1DA09: 84,
+ 0x1DA0A: 84,
+ 0x1DA0B: 84,
+ 0x1DA0C: 84,
+ 0x1DA0D: 84,
+ 0x1DA0E: 84,
+ 0x1DA0F: 84,
+ 0x1DA10: 84,
+ 0x1DA11: 84,
+ 0x1DA12: 84,
+ 0x1DA13: 84,
+ 0x1DA14: 84,
+ 0x1DA15: 84,
+ 0x1DA16: 84,
+ 0x1DA17: 84,
+ 0x1DA18: 84,
+ 0x1DA19: 84,
+ 0x1DA1A: 84,
+ 0x1DA1B: 84,
+ 0x1DA1C: 84,
+ 0x1DA1D: 84,
+ 0x1DA1E: 84,
+ 0x1DA1F: 84,
+ 0x1DA20: 84,
+ 0x1DA21: 84,
+ 0x1DA22: 84,
+ 0x1DA23: 84,
+ 0x1DA24: 84,
+ 0x1DA25: 84,
+ 0x1DA26: 84,
+ 0x1DA27: 84,
+ 0x1DA28: 84,
+ 0x1DA29: 84,
+ 0x1DA2A: 84,
+ 0x1DA2B: 84,
+ 0x1DA2C: 84,
+ 0x1DA2D: 84,
+ 0x1DA2E: 84,
+ 0x1DA2F: 84,
+ 0x1DA30: 84,
+ 0x1DA31: 84,
+ 0x1DA32: 84,
+ 0x1DA33: 84,
+ 0x1DA34: 84,
+ 0x1DA35: 84,
+ 0x1DA36: 84,
+ 0x1DA3B: 84,
+ 0x1DA3C: 84,
+ 0x1DA3D: 84,
+ 0x1DA3E: 84,
+ 0x1DA3F: 84,
+ 0x1DA40: 84,
+ 0x1DA41: 84,
+ 0x1DA42: 84,
+ 0x1DA43: 84,
+ 0x1DA44: 84,
+ 0x1DA45: 84,
+ 0x1DA46: 84,
+ 0x1DA47: 84,
+ 0x1DA48: 84,
+ 0x1DA49: 84,
+ 0x1DA4A: 84,
+ 0x1DA4B: 84,
+ 0x1DA4C: 84,
+ 0x1DA4D: 84,
+ 0x1DA4E: 84,
+ 0x1DA4F: 84,
+ 0x1DA50: 84,
+ 0x1DA51: 84,
+ 0x1DA52: 84,
+ 0x1DA53: 84,
+ 0x1DA54: 84,
+ 0x1DA55: 84,
+ 0x1DA56: 84,
+ 0x1DA57: 84,
+ 0x1DA58: 84,
+ 0x1DA59: 84,
+ 0x1DA5A: 84,
+ 0x1DA5B: 84,
+ 0x1DA5C: 84,
+ 0x1DA5D: 84,
+ 0x1DA5E: 84,
+ 0x1DA5F: 84,
+ 0x1DA60: 84,
+ 0x1DA61: 84,
+ 0x1DA62: 84,
+ 0x1DA63: 84,
+ 0x1DA64: 84,
+ 0x1DA65: 84,
+ 0x1DA66: 84,
+ 0x1DA67: 84,
+ 0x1DA68: 84,
+ 0x1DA69: 84,
+ 0x1DA6A: 84,
+ 0x1DA6B: 84,
+ 0x1DA6C: 84,
+ 0x1DA75: 84,
+ 0x1DA84: 84,
+ 0x1DA9B: 84,
+ 0x1DA9C: 84,
+ 0x1DA9D: 84,
+ 0x1DA9E: 84,
+ 0x1DA9F: 84,
+ 0x1DAA1: 84,
+ 0x1DAA2: 84,
+ 0x1DAA3: 84,
+ 0x1DAA4: 84,
+ 0x1DAA5: 84,
+ 0x1DAA6: 84,
+ 0x1DAA7: 84,
+ 0x1DAA8: 84,
+ 0x1DAA9: 84,
+ 0x1DAAA: 84,
+ 0x1DAAB: 84,
+ 0x1DAAC: 84,
+ 0x1DAAD: 84,
+ 0x1DAAE: 84,
+ 0x1DAAF: 84,
+ 0x1E000: 84,
+ 0x1E001: 84,
+ 0x1E002: 84,
+ 0x1E003: 84,
+ 0x1E004: 84,
+ 0x1E005: 84,
+ 0x1E006: 84,
+ 0x1E008: 84,
+ 0x1E009: 84,
+ 0x1E00A: 84,
+ 0x1E00B: 84,
+ 0x1E00C: 84,
+ 0x1E00D: 84,
+ 0x1E00E: 84,
+ 0x1E00F: 84,
+ 0x1E010: 84,
+ 0x1E011: 84,
+ 0x1E012: 84,
+ 0x1E013: 84,
+ 0x1E014: 84,
+ 0x1E015: 84,
+ 0x1E016: 84,
+ 0x1E017: 84,
+ 0x1E018: 84,
+ 0x1E01B: 84,
+ 0x1E01C: 84,
+ 0x1E01D: 84,
+ 0x1E01E: 84,
+ 0x1E01F: 84,
+ 0x1E020: 84,
+ 0x1E021: 84,
+ 0x1E023: 84,
+ 0x1E024: 84,
+ 0x1E026: 84,
+ 0x1E027: 84,
+ 0x1E028: 84,
+ 0x1E029: 84,
+ 0x1E02A: 84,
+ 0x1E08F: 84,
+ 0x1E130: 84,
+ 0x1E131: 84,
+ 0x1E132: 84,
+ 0x1E133: 84,
+ 0x1E134: 84,
+ 0x1E135: 84,
+ 0x1E136: 84,
+ 0x1E2AE: 84,
+ 0x1E2EC: 84,
+ 0x1E2ED: 84,
+ 0x1E2EE: 84,
+ 0x1E2EF: 84,
+ 0x1E4EC: 84,
+ 0x1E4ED: 84,
+ 0x1E4EE: 84,
+ 0x1E4EF: 84,
+ 0x1E5EE: 84,
+ 0x1E5EF: 84,
+ 0x1E8D0: 84,
+ 0x1E8D1: 84,
+ 0x1E8D2: 84,
+ 0x1E8D3: 84,
+ 0x1E8D4: 84,
+ 0x1E8D5: 84,
+ 0x1E8D6: 84,
+ 0x1E900: 68,
+ 0x1E901: 68,
+ 0x1E902: 68,
+ 0x1E903: 68,
+ 0x1E904: 68,
+ 0x1E905: 68,
+ 0x1E906: 68,
+ 0x1E907: 68,
+ 0x1E908: 68,
+ 0x1E909: 68,
+ 0x1E90A: 68,
+ 0x1E90B: 68,
+ 0x1E90C: 68,
+ 0x1E90D: 68,
+ 0x1E90E: 68,
+ 0x1E90F: 68,
+ 0x1E910: 68,
+ 0x1E911: 68,
+ 0x1E912: 68,
+ 0x1E913: 68,
+ 0x1E914: 68,
+ 0x1E915: 68,
+ 0x1E916: 68,
+ 0x1E917: 68,
+ 0x1E918: 68,
+ 0x1E919: 68,
+ 0x1E91A: 68,
+ 0x1E91B: 68,
+ 0x1E91C: 68,
+ 0x1E91D: 68,
+ 0x1E91E: 68,
+ 0x1E91F: 68,
+ 0x1E920: 68,
+ 0x1E921: 68,
+ 0x1E922: 68,
+ 0x1E923: 68,
+ 0x1E924: 68,
+ 0x1E925: 68,
+ 0x1E926: 68,
+ 0x1E927: 68,
+ 0x1E928: 68,
+ 0x1E929: 68,
+ 0x1E92A: 68,
+ 0x1E92B: 68,
+ 0x1E92C: 68,
+ 0x1E92D: 68,
+ 0x1E92E: 68,
+ 0x1E92F: 68,
+ 0x1E930: 68,
+ 0x1E931: 68,
+ 0x1E932: 68,
+ 0x1E933: 68,
+ 0x1E934: 68,
+ 0x1E935: 68,
+ 0x1E936: 68,
+ 0x1E937: 68,
+ 0x1E938: 68,
+ 0x1E939: 68,
+ 0x1E93A: 68,
+ 0x1E93B: 68,
+ 0x1E93C: 68,
+ 0x1E93D: 68,
+ 0x1E93E: 68,
+ 0x1E93F: 68,
+ 0x1E940: 68,
+ 0x1E941: 68,
+ 0x1E942: 68,
+ 0x1E943: 68,
+ 0x1E944: 84,
+ 0x1E945: 84,
+ 0x1E946: 84,
+ 0x1E947: 84,
+ 0x1E948: 84,
+ 0x1E949: 84,
+ 0x1E94A: 84,
+ 0x1E94B: 84,
+ 0xE0001: 84,
+ 0xE0020: 84,
+ 0xE0021: 84,
+ 0xE0022: 84,
+ 0xE0023: 84,
+ 0xE0024: 84,
+ 0xE0025: 84,
+ 0xE0026: 84,
+ 0xE0027: 84,
+ 0xE0028: 84,
+ 0xE0029: 84,
+ 0xE002A: 84,
+ 0xE002B: 84,
+ 0xE002C: 84,
+ 0xE002D: 84,
+ 0xE002E: 84,
+ 0xE002F: 84,
+ 0xE0030: 84,
+ 0xE0031: 84,
+ 0xE0032: 84,
+ 0xE0033: 84,
+ 0xE0034: 84,
+ 0xE0035: 84,
+ 0xE0036: 84,
+ 0xE0037: 84,
+ 0xE0038: 84,
+ 0xE0039: 84,
+ 0xE003A: 84,
+ 0xE003B: 84,
+ 0xE003C: 84,
+ 0xE003D: 84,
+ 0xE003E: 84,
+ 0xE003F: 84,
+ 0xE0040: 84,
+ 0xE0041: 84,
+ 0xE0042: 84,
+ 0xE0043: 84,
+ 0xE0044: 84,
+ 0xE0045: 84,
+ 0xE0046: 84,
+ 0xE0047: 84,
+ 0xE0048: 84,
+ 0xE0049: 84,
+ 0xE004A: 84,
+ 0xE004B: 84,
+ 0xE004C: 84,
+ 0xE004D: 84,
+ 0xE004E: 84,
+ 0xE004F: 84,
+ 0xE0050: 84,
+ 0xE0051: 84,
+ 0xE0052: 84,
+ 0xE0053: 84,
+ 0xE0054: 84,
+ 0xE0055: 84,
+ 0xE0056: 84,
+ 0xE0057: 84,
+ 0xE0058: 84,
+ 0xE0059: 84,
+ 0xE005A: 84,
+ 0xE005B: 84,
+ 0xE005C: 84,
+ 0xE005D: 84,
+ 0xE005E: 84,
+ 0xE005F: 84,
+ 0xE0060: 84,
+ 0xE0061: 84,
+ 0xE0062: 84,
+ 0xE0063: 84,
+ 0xE0064: 84,
+ 0xE0065: 84,
+ 0xE0066: 84,
+ 0xE0067: 84,
+ 0xE0068: 84,
+ 0xE0069: 84,
+ 0xE006A: 84,
+ 0xE006B: 84,
+ 0xE006C: 84,
+ 0xE006D: 84,
+ 0xE006E: 84,
+ 0xE006F: 84,
+ 0xE0070: 84,
+ 0xE0071: 84,
+ 0xE0072: 84,
+ 0xE0073: 84,
+ 0xE0074: 84,
+ 0xE0075: 84,
+ 0xE0076: 84,
+ 0xE0077: 84,
+ 0xE0078: 84,
+ 0xE0079: 84,
+ 0xE007A: 84,
+ 0xE007B: 84,
+ 0xE007C: 84,
+ 0xE007D: 84,
+ 0xE007E: 84,
+ 0xE007F: 84,
+ 0xE0100: 84,
+ 0xE0101: 84,
+ 0xE0102: 84,
+ 0xE0103: 84,
+ 0xE0104: 84,
+ 0xE0105: 84,
+ 0xE0106: 84,
+ 0xE0107: 84,
+ 0xE0108: 84,
+ 0xE0109: 84,
+ 0xE010A: 84,
+ 0xE010B: 84,
+ 0xE010C: 84,
+ 0xE010D: 84,
+ 0xE010E: 84,
+ 0xE010F: 84,
+ 0xE0110: 84,
+ 0xE0111: 84,
+ 0xE0112: 84,
+ 0xE0113: 84,
+ 0xE0114: 84,
+ 0xE0115: 84,
+ 0xE0116: 84,
+ 0xE0117: 84,
+ 0xE0118: 84,
+ 0xE0119: 84,
+ 0xE011A: 84,
+ 0xE011B: 84,
+ 0xE011C: 84,
+ 0xE011D: 84,
+ 0xE011E: 84,
+ 0xE011F: 84,
+ 0xE0120: 84,
+ 0xE0121: 84,
+ 0xE0122: 84,
+ 0xE0123: 84,
+ 0xE0124: 84,
+ 0xE0125: 84,
+ 0xE0126: 84,
+ 0xE0127: 84,
+ 0xE0128: 84,
+ 0xE0129: 84,
+ 0xE012A: 84,
+ 0xE012B: 84,
+ 0xE012C: 84,
+ 0xE012D: 84,
+ 0xE012E: 84,
+ 0xE012F: 84,
+ 0xE0130: 84,
+ 0xE0131: 84,
+ 0xE0132: 84,
+ 0xE0133: 84,
+ 0xE0134: 84,
+ 0xE0135: 84,
+ 0xE0136: 84,
+ 0xE0137: 84,
+ 0xE0138: 84,
+ 0xE0139: 84,
+ 0xE013A: 84,
+ 0xE013B: 84,
+ 0xE013C: 84,
+ 0xE013D: 84,
+ 0xE013E: 84,
+ 0xE013F: 84,
+ 0xE0140: 84,
+ 0xE0141: 84,
+ 0xE0142: 84,
+ 0xE0143: 84,
+ 0xE0144: 84,
+ 0xE0145: 84,
+ 0xE0146: 84,
+ 0xE0147: 84,
+ 0xE0148: 84,
+ 0xE0149: 84,
+ 0xE014A: 84,
+ 0xE014B: 84,
+ 0xE014C: 84,
+ 0xE014D: 84,
+ 0xE014E: 84,
+ 0xE014F: 84,
+ 0xE0150: 84,
+ 0xE0151: 84,
+ 0xE0152: 84,
+ 0xE0153: 84,
+ 0xE0154: 84,
+ 0xE0155: 84,
+ 0xE0156: 84,
+ 0xE0157: 84,
+ 0xE0158: 84,
+ 0xE0159: 84,
+ 0xE015A: 84,
+ 0xE015B: 84,
+ 0xE015C: 84,
+ 0xE015D: 84,
+ 0xE015E: 84,
+ 0xE015F: 84,
+ 0xE0160: 84,
+ 0xE0161: 84,
+ 0xE0162: 84,
+ 0xE0163: 84,
+ 0xE0164: 84,
+ 0xE0165: 84,
+ 0xE0166: 84,
+ 0xE0167: 84,
+ 0xE0168: 84,
+ 0xE0169: 84,
+ 0xE016A: 84,
+ 0xE016B: 84,
+ 0xE016C: 84,
+ 0xE016D: 84,
+ 0xE016E: 84,
+ 0xE016F: 84,
+ 0xE0170: 84,
+ 0xE0171: 84,
+ 0xE0172: 84,
+ 0xE0173: 84,
+ 0xE0174: 84,
+ 0xE0175: 84,
+ 0xE0176: 84,
+ 0xE0177: 84,
+ 0xE0178: 84,
+ 0xE0179: 84,
+ 0xE017A: 84,
+ 0xE017B: 84,
+ 0xE017C: 84,
+ 0xE017D: 84,
+ 0xE017E: 84,
+ 0xE017F: 84,
+ 0xE0180: 84,
+ 0xE0181: 84,
+ 0xE0182: 84,
+ 0xE0183: 84,
+ 0xE0184: 84,
+ 0xE0185: 84,
+ 0xE0186: 84,
+ 0xE0187: 84,
+ 0xE0188: 84,
+ 0xE0189: 84,
+ 0xE018A: 84,
+ 0xE018B: 84,
+ 0xE018C: 84,
+ 0xE018D: 84,
+ 0xE018E: 84,
+ 0xE018F: 84,
+ 0xE0190: 84,
+ 0xE0191: 84,
+ 0xE0192: 84,
+ 0xE0193: 84,
+ 0xE0194: 84,
+ 0xE0195: 84,
+ 0xE0196: 84,
+ 0xE0197: 84,
+ 0xE0198: 84,
+ 0xE0199: 84,
+ 0xE019A: 84,
+ 0xE019B: 84,
+ 0xE019C: 84,
+ 0xE019D: 84,
+ 0xE019E: 84,
+ 0xE019F: 84,
+ 0xE01A0: 84,
+ 0xE01A1: 84,
+ 0xE01A2: 84,
+ 0xE01A3: 84,
+ 0xE01A4: 84,
+ 0xE01A5: 84,
+ 0xE01A6: 84,
+ 0xE01A7: 84,
+ 0xE01A8: 84,
+ 0xE01A9: 84,
+ 0xE01AA: 84,
+ 0xE01AB: 84,
+ 0xE01AC: 84,
+ 0xE01AD: 84,
+ 0xE01AE: 84,
+ 0xE01AF: 84,
+ 0xE01B0: 84,
+ 0xE01B1: 84,
+ 0xE01B2: 84,
+ 0xE01B3: 84,
+ 0xE01B4: 84,
+ 0xE01B5: 84,
+ 0xE01B6: 84,
+ 0xE01B7: 84,
+ 0xE01B8: 84,
+ 0xE01B9: 84,
+ 0xE01BA: 84,
+ 0xE01BB: 84,
+ 0xE01BC: 84,
+ 0xE01BD: 84,
+ 0xE01BE: 84,
+ 0xE01BF: 84,
+ 0xE01C0: 84,
+ 0xE01C1: 84,
+ 0xE01C2: 84,
+ 0xE01C3: 84,
+ 0xE01C4: 84,
+ 0xE01C5: 84,
+ 0xE01C6: 84,
+ 0xE01C7: 84,
+ 0xE01C8: 84,
+ 0xE01C9: 84,
+ 0xE01CA: 84,
+ 0xE01CB: 84,
+ 0xE01CC: 84,
+ 0xE01CD: 84,
+ 0xE01CE: 84,
+ 0xE01CF: 84,
+ 0xE01D0: 84,
+ 0xE01D1: 84,
+ 0xE01D2: 84,
+ 0xE01D3: 84,
+ 0xE01D4: 84,
+ 0xE01D5: 84,
+ 0xE01D6: 84,
+ 0xE01D7: 84,
+ 0xE01D8: 84,
+ 0xE01D9: 84,
+ 0xE01DA: 84,
+ 0xE01DB: 84,
+ 0xE01DC: 84,
+ 0xE01DD: 84,
+ 0xE01DE: 84,
+ 0xE01DF: 84,
+ 0xE01E0: 84,
+ 0xE01E1: 84,
+ 0xE01E2: 84,
+ 0xE01E3: 84,
+ 0xE01E4: 84,
+ 0xE01E5: 84,
+ 0xE01E6: 84,
+ 0xE01E7: 84,
+ 0xE01E8: 84,
+ 0xE01E9: 84,
+ 0xE01EA: 84,
+ 0xE01EB: 84,
+ 0xE01EC: 84,
+ 0xE01ED: 84,
+ 0xE01EE: 84,
+ 0xE01EF: 84,
+}
+codepoint_classes = {
+ "PVALID": (
+ 0x2D0000002E,
+ 0x300000003A,
+ 0x610000007B,
+ 0xDF000000F7,
+ 0xF800000100,
+ 0x10100000102,
+ 0x10300000104,
+ 0x10500000106,
+ 0x10700000108,
+ 0x1090000010A,
+ 0x10B0000010C,
+ 0x10D0000010E,
+ 0x10F00000110,
+ 0x11100000112,
+ 0x11300000114,
+ 0x11500000116,
+ 0x11700000118,
+ 0x1190000011A,
+ 0x11B0000011C,
+ 0x11D0000011E,
+ 0x11F00000120,
+ 0x12100000122,
+ 0x12300000124,
+ 0x12500000126,
+ 0x12700000128,
+ 0x1290000012A,
+ 0x12B0000012C,
+ 0x12D0000012E,
+ 0x12F00000130,
+ 0x13100000132,
+ 0x13500000136,
+ 0x13700000139,
+ 0x13A0000013B,
+ 0x13C0000013D,
+ 0x13E0000013F,
+ 0x14200000143,
+ 0x14400000145,
+ 0x14600000147,
+ 0x14800000149,
+ 0x14B0000014C,
+ 0x14D0000014E,
+ 0x14F00000150,
+ 0x15100000152,
+ 0x15300000154,
+ 0x15500000156,
+ 0x15700000158,
+ 0x1590000015A,
+ 0x15B0000015C,
+ 0x15D0000015E,
+ 0x15F00000160,
+ 0x16100000162,
+ 0x16300000164,
+ 0x16500000166,
+ 0x16700000168,
+ 0x1690000016A,
+ 0x16B0000016C,
+ 0x16D0000016E,
+ 0x16F00000170,
+ 0x17100000172,
+ 0x17300000174,
+ 0x17500000176,
+ 0x17700000178,
+ 0x17A0000017B,
+ 0x17C0000017D,
+ 0x17E0000017F,
+ 0x18000000181,
+ 0x18300000184,
+ 0x18500000186,
+ 0x18800000189,
+ 0x18C0000018E,
+ 0x19200000193,
+ 0x19500000196,
+ 0x1990000019C,
+ 0x19E0000019F,
+ 0x1A1000001A2,
+ 0x1A3000001A4,
+ 0x1A5000001A6,
+ 0x1A8000001A9,
+ 0x1AA000001AC,
+ 0x1AD000001AE,
+ 0x1B0000001B1,
+ 0x1B4000001B5,
+ 0x1B6000001B7,
+ 0x1B9000001BC,
+ 0x1BD000001C4,
+ 0x1CE000001CF,
+ 0x1D0000001D1,
+ 0x1D2000001D3,
+ 0x1D4000001D5,
+ 0x1D6000001D7,
+ 0x1D8000001D9,
+ 0x1DA000001DB,
+ 0x1DC000001DE,
+ 0x1DF000001E0,
+ 0x1E1000001E2,
+ 0x1E3000001E4,
+ 0x1E5000001E6,
+ 0x1E7000001E8,
+ 0x1E9000001EA,
+ 0x1EB000001EC,
+ 0x1ED000001EE,
+ 0x1EF000001F1,
+ 0x1F5000001F6,
+ 0x1F9000001FA,
+ 0x1FB000001FC,
+ 0x1FD000001FE,
+ 0x1FF00000200,
+ 0x20100000202,
+ 0x20300000204,
+ 0x20500000206,
+ 0x20700000208,
+ 0x2090000020A,
+ 0x20B0000020C,
+ 0x20D0000020E,
+ 0x20F00000210,
+ 0x21100000212,
+ 0x21300000214,
+ 0x21500000216,
+ 0x21700000218,
+ 0x2190000021A,
+ 0x21B0000021C,
+ 0x21D0000021E,
+ 0x21F00000220,
+ 0x22100000222,
+ 0x22300000224,
+ 0x22500000226,
+ 0x22700000228,
+ 0x2290000022A,
+ 0x22B0000022C,
+ 0x22D0000022E,
+ 0x22F00000230,
+ 0x23100000232,
+ 0x2330000023A,
+ 0x23C0000023D,
+ 0x23F00000241,
+ 0x24200000243,
+ 0x24700000248,
+ 0x2490000024A,
+ 0x24B0000024C,
+ 0x24D0000024E,
+ 0x24F000002B0,
+ 0x2B9000002C2,
+ 0x2C6000002D2,
+ 0x2EC000002ED,
+ 0x2EE000002EF,
+ 0x30000000340,
+ 0x34200000343,
+ 0x3460000034F,
+ 0x35000000370,
+ 0x37100000372,
+ 0x37300000374,
+ 0x37700000378,
+ 0x37B0000037E,
+ 0x39000000391,
+ 0x3AC000003CF,
+ 0x3D7000003D8,
+ 0x3D9000003DA,
+ 0x3DB000003DC,
+ 0x3DD000003DE,
+ 0x3DF000003E0,
+ 0x3E1000003E2,
+ 0x3E3000003E4,
+ 0x3E5000003E6,
+ 0x3E7000003E8,
+ 0x3E9000003EA,
+ 0x3EB000003EC,
+ 0x3ED000003EE,
+ 0x3EF000003F0,
+ 0x3F3000003F4,
+ 0x3F8000003F9,
+ 0x3FB000003FD,
+ 0x43000000460,
+ 0x46100000462,
+ 0x46300000464,
+ 0x46500000466,
+ 0x46700000468,
+ 0x4690000046A,
+ 0x46B0000046C,
+ 0x46D0000046E,
+ 0x46F00000470,
+ 0x47100000472,
+ 0x47300000474,
+ 0x47500000476,
+ 0x47700000478,
+ 0x4790000047A,
+ 0x47B0000047C,
+ 0x47D0000047E,
+ 0x47F00000480,
+ 0x48100000482,
+ 0x48300000488,
+ 0x48B0000048C,
+ 0x48D0000048E,
+ 0x48F00000490,
+ 0x49100000492,
+ 0x49300000494,
+ 0x49500000496,
+ 0x49700000498,
+ 0x4990000049A,
+ 0x49B0000049C,
+ 0x49D0000049E,
+ 0x49F000004A0,
+ 0x4A1000004A2,
+ 0x4A3000004A4,
+ 0x4A5000004A6,
+ 0x4A7000004A8,
+ 0x4A9000004AA,
+ 0x4AB000004AC,
+ 0x4AD000004AE,
+ 0x4AF000004B0,
+ 0x4B1000004B2,
+ 0x4B3000004B4,
+ 0x4B5000004B6,
+ 0x4B7000004B8,
+ 0x4B9000004BA,
+ 0x4BB000004BC,
+ 0x4BD000004BE,
+ 0x4BF000004C0,
+ 0x4C2000004C3,
+ 0x4C4000004C5,
+ 0x4C6000004C7,
+ 0x4C8000004C9,
+ 0x4CA000004CB,
+ 0x4CC000004CD,
+ 0x4CE000004D0,
+ 0x4D1000004D2,
+ 0x4D3000004D4,
+ 0x4D5000004D6,
+ 0x4D7000004D8,
+ 0x4D9000004DA,
+ 0x4DB000004DC,
+ 0x4DD000004DE,
+ 0x4DF000004E0,
+ 0x4E1000004E2,
+ 0x4E3000004E4,
+ 0x4E5000004E6,
+ 0x4E7000004E8,
+ 0x4E9000004EA,
+ 0x4EB000004EC,
+ 0x4ED000004EE,
+ 0x4EF000004F0,
+ 0x4F1000004F2,
+ 0x4F3000004F4,
+ 0x4F5000004F6,
+ 0x4F7000004F8,
+ 0x4F9000004FA,
+ 0x4FB000004FC,
+ 0x4FD000004FE,
+ 0x4FF00000500,
+ 0x50100000502,
+ 0x50300000504,
+ 0x50500000506,
+ 0x50700000508,
+ 0x5090000050A,
+ 0x50B0000050C,
+ 0x50D0000050E,
+ 0x50F00000510,
+ 0x51100000512,
+ 0x51300000514,
+ 0x51500000516,
+ 0x51700000518,
+ 0x5190000051A,
+ 0x51B0000051C,
+ 0x51D0000051E,
+ 0x51F00000520,
+ 0x52100000522,
+ 0x52300000524,
+ 0x52500000526,
+ 0x52700000528,
+ 0x5290000052A,
+ 0x52B0000052C,
+ 0x52D0000052E,
+ 0x52F00000530,
+ 0x5590000055A,
+ 0x56000000587,
+ 0x58800000589,
+ 0x591000005BE,
+ 0x5BF000005C0,
+ 0x5C1000005C3,
+ 0x5C4000005C6,
+ 0x5C7000005C8,
+ 0x5D0000005EB,
+ 0x5EF000005F3,
+ 0x6100000061B,
+ 0x62000000640,
+ 0x64100000660,
+ 0x66E00000675,
+ 0x679000006D4,
+ 0x6D5000006DD,
+ 0x6DF000006E9,
+ 0x6EA000006F0,
+ 0x6FA00000700,
+ 0x7100000074B,
+ 0x74D000007B2,
+ 0x7C0000007F6,
+ 0x7FD000007FE,
+ 0x8000000082E,
+ 0x8400000085C,
+ 0x8600000086B,
+ 0x87000000888,
+ 0x8890000088F,
+ 0x897000008E2,
+ 0x8E300000958,
+ 0x96000000964,
+ 0x96600000970,
+ 0x97100000984,
+ 0x9850000098D,
+ 0x98F00000991,
+ 0x993000009A9,
+ 0x9AA000009B1,
+ 0x9B2000009B3,
+ 0x9B6000009BA,
+ 0x9BC000009C5,
+ 0x9C7000009C9,
+ 0x9CB000009CF,
+ 0x9D7000009D8,
+ 0x9E0000009E4,
+ 0x9E6000009F2,
+ 0x9FC000009FD,
+ 0x9FE000009FF,
+ 0xA0100000A04,
+ 0xA0500000A0B,
+ 0xA0F00000A11,
+ 0xA1300000A29,
+ 0xA2A00000A31,
+ 0xA3200000A33,
+ 0xA3500000A36,
+ 0xA3800000A3A,
+ 0xA3C00000A3D,
+ 0xA3E00000A43,
+ 0xA4700000A49,
+ 0xA4B00000A4E,
+ 0xA5100000A52,
+ 0xA5C00000A5D,
+ 0xA6600000A76,
+ 0xA8100000A84,
+ 0xA8500000A8E,
+ 0xA8F00000A92,
+ 0xA9300000AA9,
+ 0xAAA00000AB1,
+ 0xAB200000AB4,
+ 0xAB500000ABA,
+ 0xABC00000AC6,
+ 0xAC700000ACA,
+ 0xACB00000ACE,
+ 0xAD000000AD1,
+ 0xAE000000AE4,
+ 0xAE600000AF0,
+ 0xAF900000B00,
+ 0xB0100000B04,
+ 0xB0500000B0D,
+ 0xB0F00000B11,
+ 0xB1300000B29,
+ 0xB2A00000B31,
+ 0xB3200000B34,
+ 0xB3500000B3A,
+ 0xB3C00000B45,
+ 0xB4700000B49,
+ 0xB4B00000B4E,
+ 0xB5500000B58,
+ 0xB5F00000B64,
+ 0xB6600000B70,
+ 0xB7100000B72,
+ 0xB8200000B84,
+ 0xB8500000B8B,
+ 0xB8E00000B91,
+ 0xB9200000B96,
+ 0xB9900000B9B,
+ 0xB9C00000B9D,
+ 0xB9E00000BA0,
+ 0xBA300000BA5,
+ 0xBA800000BAB,
+ 0xBAE00000BBA,
+ 0xBBE00000BC3,
+ 0xBC600000BC9,
+ 0xBCA00000BCE,
+ 0xBD000000BD1,
+ 0xBD700000BD8,
+ 0xBE600000BF0,
+ 0xC0000000C0D,
+ 0xC0E00000C11,
+ 0xC1200000C29,
+ 0xC2A00000C3A,
+ 0xC3C00000C45,
+ 0xC4600000C49,
+ 0xC4A00000C4E,
+ 0xC5500000C57,
+ 0xC5800000C5B,
+ 0xC5D00000C5E,
+ 0xC6000000C64,
+ 0xC6600000C70,
+ 0xC8000000C84,
+ 0xC8500000C8D,
+ 0xC8E00000C91,
+ 0xC9200000CA9,
+ 0xCAA00000CB4,
+ 0xCB500000CBA,
+ 0xCBC00000CC5,
+ 0xCC600000CC9,
+ 0xCCA00000CCE,
+ 0xCD500000CD7,
+ 0xCDD00000CDF,
+ 0xCE000000CE4,
+ 0xCE600000CF0,
+ 0xCF100000CF4,
+ 0xD0000000D0D,
+ 0xD0E00000D11,
+ 0xD1200000D45,
+ 0xD4600000D49,
+ 0xD4A00000D4F,
+ 0xD5400000D58,
+ 0xD5F00000D64,
+ 0xD6600000D70,
+ 0xD7A00000D80,
+ 0xD8100000D84,
+ 0xD8500000D97,
+ 0xD9A00000DB2,
+ 0xDB300000DBC,
+ 0xDBD00000DBE,
+ 0xDC000000DC7,
+ 0xDCA00000DCB,
+ 0xDCF00000DD5,
+ 0xDD600000DD7,
+ 0xDD800000DE0,
+ 0xDE600000DF0,
+ 0xDF200000DF4,
+ 0xE0100000E33,
+ 0xE3400000E3B,
+ 0xE4000000E4F,
+ 0xE5000000E5A,
+ 0xE8100000E83,
+ 0xE8400000E85,
+ 0xE8600000E8B,
+ 0xE8C00000EA4,
+ 0xEA500000EA6,
+ 0xEA700000EB3,
+ 0xEB400000EBE,
+ 0xEC000000EC5,
+ 0xEC600000EC7,
+ 0xEC800000ECF,
+ 0xED000000EDA,
+ 0xEDE00000EE0,
+ 0xF0000000F01,
+ 0xF0B00000F0C,
+ 0xF1800000F1A,
+ 0xF2000000F2A,
+ 0xF3500000F36,
+ 0xF3700000F38,
+ 0xF3900000F3A,
+ 0xF3E00000F43,
+ 0xF4400000F48,
+ 0xF4900000F4D,
+ 0xF4E00000F52,
+ 0xF5300000F57,
+ 0xF5800000F5C,
+ 0xF5D00000F69,
+ 0xF6A00000F6D,
+ 0xF7100000F73,
+ 0xF7400000F75,
+ 0xF7A00000F81,
+ 0xF8200000F85,
+ 0xF8600000F93,
+ 0xF9400000F98,
+ 0xF9900000F9D,
+ 0xF9E00000FA2,
+ 0xFA300000FA7,
+ 0xFA800000FAC,
+ 0xFAD00000FB9,
+ 0xFBA00000FBD,
+ 0xFC600000FC7,
+ 0x10000000104A,
+ 0x10500000109E,
+ 0x10D0000010FB,
+ 0x10FD00001100,
+ 0x120000001249,
+ 0x124A0000124E,
+ 0x125000001257,
+ 0x125800001259,
+ 0x125A0000125E,
+ 0x126000001289,
+ 0x128A0000128E,
+ 0x1290000012B1,
+ 0x12B2000012B6,
+ 0x12B8000012BF,
+ 0x12C0000012C1,
+ 0x12C2000012C6,
+ 0x12C8000012D7,
+ 0x12D800001311,
+ 0x131200001316,
+ 0x13180000135B,
+ 0x135D00001360,
+ 0x138000001390,
+ 0x13A0000013F6,
+ 0x14010000166D,
+ 0x166F00001680,
+ 0x16810000169B,
+ 0x16A0000016EB,
+ 0x16F1000016F9,
+ 0x170000001716,
+ 0x171F00001735,
+ 0x174000001754,
+ 0x17600000176D,
+ 0x176E00001771,
+ 0x177200001774,
+ 0x1780000017B4,
+ 0x17B6000017D4,
+ 0x17D7000017D8,
+ 0x17DC000017DE,
+ 0x17E0000017EA,
+ 0x18100000181A,
+ 0x182000001879,
+ 0x1880000018AB,
+ 0x18B0000018F6,
+ 0x19000000191F,
+ 0x19200000192C,
+ 0x19300000193C,
+ 0x19460000196E,
+ 0x197000001975,
+ 0x1980000019AC,
+ 0x19B0000019CA,
+ 0x19D0000019DA,
+ 0x1A0000001A1C,
+ 0x1A2000001A5F,
+ 0x1A6000001A7D,
+ 0x1A7F00001A8A,
+ 0x1A9000001A9A,
+ 0x1AA700001AA8,
+ 0x1AB000001ABE,
+ 0x1ABF00001ACF,
+ 0x1B0000001B4D,
+ 0x1B5000001B5A,
+ 0x1B6B00001B74,
+ 0x1B8000001BF4,
+ 0x1C0000001C38,
+ 0x1C4000001C4A,
+ 0x1C4D00001C7E,
+ 0x1C8A00001C8B,
+ 0x1CD000001CD3,
+ 0x1CD400001CFB,
+ 0x1D0000001D2C,
+ 0x1D2F00001D30,
+ 0x1D3B00001D3C,
+ 0x1D4E00001D4F,
+ 0x1D6B00001D78,
+ 0x1D7900001D9B,
+ 0x1DC000001E00,
+ 0x1E0100001E02,
+ 0x1E0300001E04,
+ 0x1E0500001E06,
+ 0x1E0700001E08,
+ 0x1E0900001E0A,
+ 0x1E0B00001E0C,
+ 0x1E0D00001E0E,
+ 0x1E0F00001E10,
+ 0x1E1100001E12,
+ 0x1E1300001E14,
+ 0x1E1500001E16,
+ 0x1E1700001E18,
+ 0x1E1900001E1A,
+ 0x1E1B00001E1C,
+ 0x1E1D00001E1E,
+ 0x1E1F00001E20,
+ 0x1E2100001E22,
+ 0x1E2300001E24,
+ 0x1E2500001E26,
+ 0x1E2700001E28,
+ 0x1E2900001E2A,
+ 0x1E2B00001E2C,
+ 0x1E2D00001E2E,
+ 0x1E2F00001E30,
+ 0x1E3100001E32,
+ 0x1E3300001E34,
+ 0x1E3500001E36,
+ 0x1E3700001E38,
+ 0x1E3900001E3A,
+ 0x1E3B00001E3C,
+ 0x1E3D00001E3E,
+ 0x1E3F00001E40,
+ 0x1E4100001E42,
+ 0x1E4300001E44,
+ 0x1E4500001E46,
+ 0x1E4700001E48,
+ 0x1E4900001E4A,
+ 0x1E4B00001E4C,
+ 0x1E4D00001E4E,
+ 0x1E4F00001E50,
+ 0x1E5100001E52,
+ 0x1E5300001E54,
+ 0x1E5500001E56,
+ 0x1E5700001E58,
+ 0x1E5900001E5A,
+ 0x1E5B00001E5C,
+ 0x1E5D00001E5E,
+ 0x1E5F00001E60,
+ 0x1E6100001E62,
+ 0x1E6300001E64,
+ 0x1E6500001E66,
+ 0x1E6700001E68,
+ 0x1E6900001E6A,
+ 0x1E6B00001E6C,
+ 0x1E6D00001E6E,
+ 0x1E6F00001E70,
+ 0x1E7100001E72,
+ 0x1E7300001E74,
+ 0x1E7500001E76,
+ 0x1E7700001E78,
+ 0x1E7900001E7A,
+ 0x1E7B00001E7C,
+ 0x1E7D00001E7E,
+ 0x1E7F00001E80,
+ 0x1E8100001E82,
+ 0x1E8300001E84,
+ 0x1E8500001E86,
+ 0x1E8700001E88,
+ 0x1E8900001E8A,
+ 0x1E8B00001E8C,
+ 0x1E8D00001E8E,
+ 0x1E8F00001E90,
+ 0x1E9100001E92,
+ 0x1E9300001E94,
+ 0x1E9500001E9A,
+ 0x1E9C00001E9E,
+ 0x1E9F00001EA0,
+ 0x1EA100001EA2,
+ 0x1EA300001EA4,
+ 0x1EA500001EA6,
+ 0x1EA700001EA8,
+ 0x1EA900001EAA,
+ 0x1EAB00001EAC,
+ 0x1EAD00001EAE,
+ 0x1EAF00001EB0,
+ 0x1EB100001EB2,
+ 0x1EB300001EB4,
+ 0x1EB500001EB6,
+ 0x1EB700001EB8,
+ 0x1EB900001EBA,
+ 0x1EBB00001EBC,
+ 0x1EBD00001EBE,
+ 0x1EBF00001EC0,
+ 0x1EC100001EC2,
+ 0x1EC300001EC4,
+ 0x1EC500001EC6,
+ 0x1EC700001EC8,
+ 0x1EC900001ECA,
+ 0x1ECB00001ECC,
+ 0x1ECD00001ECE,
+ 0x1ECF00001ED0,
+ 0x1ED100001ED2,
+ 0x1ED300001ED4,
+ 0x1ED500001ED6,
+ 0x1ED700001ED8,
+ 0x1ED900001EDA,
+ 0x1EDB00001EDC,
+ 0x1EDD00001EDE,
+ 0x1EDF00001EE0,
+ 0x1EE100001EE2,
+ 0x1EE300001EE4,
+ 0x1EE500001EE6,
+ 0x1EE700001EE8,
+ 0x1EE900001EEA,
+ 0x1EEB00001EEC,
+ 0x1EED00001EEE,
+ 0x1EEF00001EF0,
+ 0x1EF100001EF2,
+ 0x1EF300001EF4,
+ 0x1EF500001EF6,
+ 0x1EF700001EF8,
+ 0x1EF900001EFA,
+ 0x1EFB00001EFC,
+ 0x1EFD00001EFE,
+ 0x1EFF00001F08,
+ 0x1F1000001F16,
+ 0x1F2000001F28,
+ 0x1F3000001F38,
+ 0x1F4000001F46,
+ 0x1F5000001F58,
+ 0x1F6000001F68,
+ 0x1F7000001F71,
+ 0x1F7200001F73,
+ 0x1F7400001F75,
+ 0x1F7600001F77,
+ 0x1F7800001F79,
+ 0x1F7A00001F7B,
+ 0x1F7C00001F7D,
+ 0x1FB000001FB2,
+ 0x1FB600001FB7,
+ 0x1FC600001FC7,
+ 0x1FD000001FD3,
+ 0x1FD600001FD8,
+ 0x1FE000001FE3,
+ 0x1FE400001FE8,
+ 0x1FF600001FF7,
+ 0x214E0000214F,
+ 0x218400002185,
+ 0x2C3000002C60,
+ 0x2C6100002C62,
+ 0x2C6500002C67,
+ 0x2C6800002C69,
+ 0x2C6A00002C6B,
+ 0x2C6C00002C6D,
+ 0x2C7100002C72,
+ 0x2C7300002C75,
+ 0x2C7600002C7C,
+ 0x2C8100002C82,
+ 0x2C8300002C84,
+ 0x2C8500002C86,
+ 0x2C8700002C88,
+ 0x2C8900002C8A,
+ 0x2C8B00002C8C,
+ 0x2C8D00002C8E,
+ 0x2C8F00002C90,
+ 0x2C9100002C92,
+ 0x2C9300002C94,
+ 0x2C9500002C96,
+ 0x2C9700002C98,
+ 0x2C9900002C9A,
+ 0x2C9B00002C9C,
+ 0x2C9D00002C9E,
+ 0x2C9F00002CA0,
+ 0x2CA100002CA2,
+ 0x2CA300002CA4,
+ 0x2CA500002CA6,
+ 0x2CA700002CA8,
+ 0x2CA900002CAA,
+ 0x2CAB00002CAC,
+ 0x2CAD00002CAE,
+ 0x2CAF00002CB0,
+ 0x2CB100002CB2,
+ 0x2CB300002CB4,
+ 0x2CB500002CB6,
+ 0x2CB700002CB8,
+ 0x2CB900002CBA,
+ 0x2CBB00002CBC,
+ 0x2CBD00002CBE,
+ 0x2CBF00002CC0,
+ 0x2CC100002CC2,
+ 0x2CC300002CC4,
+ 0x2CC500002CC6,
+ 0x2CC700002CC8,
+ 0x2CC900002CCA,
+ 0x2CCB00002CCC,
+ 0x2CCD00002CCE,
+ 0x2CCF00002CD0,
+ 0x2CD100002CD2,
+ 0x2CD300002CD4,
+ 0x2CD500002CD6,
+ 0x2CD700002CD8,
+ 0x2CD900002CDA,
+ 0x2CDB00002CDC,
+ 0x2CDD00002CDE,
+ 0x2CDF00002CE0,
+ 0x2CE100002CE2,
+ 0x2CE300002CE5,
+ 0x2CEC00002CED,
+ 0x2CEE00002CF2,
+ 0x2CF300002CF4,
+ 0x2D0000002D26,
+ 0x2D2700002D28,
+ 0x2D2D00002D2E,
+ 0x2D3000002D68,
+ 0x2D7F00002D97,
+ 0x2DA000002DA7,
+ 0x2DA800002DAF,
+ 0x2DB000002DB7,
+ 0x2DB800002DBF,
+ 0x2DC000002DC7,
+ 0x2DC800002DCF,
+ 0x2DD000002DD7,
+ 0x2DD800002DDF,
+ 0x2DE000002E00,
+ 0x2E2F00002E30,
+ 0x300500003008,
+ 0x302A0000302E,
+ 0x303C0000303D,
+ 0x304100003097,
+ 0x30990000309B,
+ 0x309D0000309F,
+ 0x30A1000030FB,
+ 0x30FC000030FF,
+ 0x310500003130,
+ 0x31A0000031C0,
+ 0x31F000003200,
+ 0x340000004DC0,
+ 0x4E000000A48D,
+ 0xA4D00000A4FE,
+ 0xA5000000A60D,
+ 0xA6100000A62C,
+ 0xA6410000A642,
+ 0xA6430000A644,
+ 0xA6450000A646,
+ 0xA6470000A648,
+ 0xA6490000A64A,
+ 0xA64B0000A64C,
+ 0xA64D0000A64E,
+ 0xA64F0000A650,
+ 0xA6510000A652,
+ 0xA6530000A654,
+ 0xA6550000A656,
+ 0xA6570000A658,
+ 0xA6590000A65A,
+ 0xA65B0000A65C,
+ 0xA65D0000A65E,
+ 0xA65F0000A660,
+ 0xA6610000A662,
+ 0xA6630000A664,
+ 0xA6650000A666,
+ 0xA6670000A668,
+ 0xA6690000A66A,
+ 0xA66B0000A66C,
+ 0xA66D0000A670,
+ 0xA6740000A67E,
+ 0xA67F0000A680,
+ 0xA6810000A682,
+ 0xA6830000A684,
+ 0xA6850000A686,
+ 0xA6870000A688,
+ 0xA6890000A68A,
+ 0xA68B0000A68C,
+ 0xA68D0000A68E,
+ 0xA68F0000A690,
+ 0xA6910000A692,
+ 0xA6930000A694,
+ 0xA6950000A696,
+ 0xA6970000A698,
+ 0xA6990000A69A,
+ 0xA69B0000A69C,
+ 0xA69E0000A6E6,
+ 0xA6F00000A6F2,
+ 0xA7170000A720,
+ 0xA7230000A724,
+ 0xA7250000A726,
+ 0xA7270000A728,
+ 0xA7290000A72A,
+ 0xA72B0000A72C,
+ 0xA72D0000A72E,
+ 0xA72F0000A732,
+ 0xA7330000A734,
+ 0xA7350000A736,
+ 0xA7370000A738,
+ 0xA7390000A73A,
+ 0xA73B0000A73C,
+ 0xA73D0000A73E,
+ 0xA73F0000A740,
+ 0xA7410000A742,
+ 0xA7430000A744,
+ 0xA7450000A746,
+ 0xA7470000A748,
+ 0xA7490000A74A,
+ 0xA74B0000A74C,
+ 0xA74D0000A74E,
+ 0xA74F0000A750,
+ 0xA7510000A752,
+ 0xA7530000A754,
+ 0xA7550000A756,
+ 0xA7570000A758,
+ 0xA7590000A75A,
+ 0xA75B0000A75C,
+ 0xA75D0000A75E,
+ 0xA75F0000A760,
+ 0xA7610000A762,
+ 0xA7630000A764,
+ 0xA7650000A766,
+ 0xA7670000A768,
+ 0xA7690000A76A,
+ 0xA76B0000A76C,
+ 0xA76D0000A76E,
+ 0xA76F0000A770,
+ 0xA7710000A779,
+ 0xA77A0000A77B,
+ 0xA77C0000A77D,
+ 0xA77F0000A780,
+ 0xA7810000A782,
+ 0xA7830000A784,
+ 0xA7850000A786,
+ 0xA7870000A789,
+ 0xA78C0000A78D,
+ 0xA78E0000A790,
+ 0xA7910000A792,
+ 0xA7930000A796,
+ 0xA7970000A798,
+ 0xA7990000A79A,
+ 0xA79B0000A79C,
+ 0xA79D0000A79E,
+ 0xA79F0000A7A0,
+ 0xA7A10000A7A2,
+ 0xA7A30000A7A4,
+ 0xA7A50000A7A6,
+ 0xA7A70000A7A8,
+ 0xA7A90000A7AA,
+ 0xA7AF0000A7B0,
+ 0xA7B50000A7B6,
+ 0xA7B70000A7B8,
+ 0xA7B90000A7BA,
+ 0xA7BB0000A7BC,
+ 0xA7BD0000A7BE,
+ 0xA7BF0000A7C0,
+ 0xA7C10000A7C2,
+ 0xA7C30000A7C4,
+ 0xA7C80000A7C9,
+ 0xA7CA0000A7CB,
+ 0xA7CD0000A7CE,
+ 0xA7D10000A7D2,
+ 0xA7D30000A7D4,
+ 0xA7D50000A7D6,
+ 0xA7D70000A7D8,
+ 0xA7D90000A7DA,
+ 0xA7DB0000A7DC,
+ 0xA7F60000A7F8,
+ 0xA7FA0000A828,
+ 0xA82C0000A82D,
+ 0xA8400000A874,
+ 0xA8800000A8C6,
+ 0xA8D00000A8DA,
+ 0xA8E00000A8F8,
+ 0xA8FB0000A8FC,
+ 0xA8FD0000A92E,
+ 0xA9300000A954,
+ 0xA9800000A9C1,
+ 0xA9CF0000A9DA,
+ 0xA9E00000A9FF,
+ 0xAA000000AA37,
+ 0xAA400000AA4E,
+ 0xAA500000AA5A,
+ 0xAA600000AA77,
+ 0xAA7A0000AAC3,
+ 0xAADB0000AADE,
+ 0xAAE00000AAF0,
+ 0xAAF20000AAF7,
+ 0xAB010000AB07,
+ 0xAB090000AB0F,
+ 0xAB110000AB17,
+ 0xAB200000AB27,
+ 0xAB280000AB2F,
+ 0xAB300000AB5B,
+ 0xAB600000AB69,
+ 0xABC00000ABEB,
+ 0xABEC0000ABEE,
+ 0xABF00000ABFA,
+ 0xAC000000D7A4,
+ 0xFA0E0000FA10,
+ 0xFA110000FA12,
+ 0xFA130000FA15,
+ 0xFA1F0000FA20,
+ 0xFA210000FA22,
+ 0xFA230000FA25,
+ 0xFA270000FA2A,
+ 0xFB1E0000FB1F,
+ 0xFE200000FE30,
+ 0xFE730000FE74,
+ 0x100000001000C,
+ 0x1000D00010027,
+ 0x100280001003B,
+ 0x1003C0001003E,
+ 0x1003F0001004E,
+ 0x100500001005E,
+ 0x10080000100FB,
+ 0x101FD000101FE,
+ 0x102800001029D,
+ 0x102A0000102D1,
+ 0x102E0000102E1,
+ 0x1030000010320,
+ 0x1032D00010341,
+ 0x103420001034A,
+ 0x103500001037B,
+ 0x103800001039E,
+ 0x103A0000103C4,
+ 0x103C8000103D0,
+ 0x104280001049E,
+ 0x104A0000104AA,
+ 0x104D8000104FC,
+ 0x1050000010528,
+ 0x1053000010564,
+ 0x10597000105A2,
+ 0x105A3000105B2,
+ 0x105B3000105BA,
+ 0x105BB000105BD,
+ 0x105C0000105F4,
+ 0x1060000010737,
+ 0x1074000010756,
+ 0x1076000010768,
+ 0x1078000010781,
+ 0x1080000010806,
+ 0x1080800010809,
+ 0x1080A00010836,
+ 0x1083700010839,
+ 0x1083C0001083D,
+ 0x1083F00010856,
+ 0x1086000010877,
+ 0x108800001089F,
+ 0x108E0000108F3,
+ 0x108F4000108F6,
+ 0x1090000010916,
+ 0x109200001093A,
+ 0x10980000109B8,
+ 0x109BE000109C0,
+ 0x10A0000010A04,
+ 0x10A0500010A07,
+ 0x10A0C00010A14,
+ 0x10A1500010A18,
+ 0x10A1900010A36,
+ 0x10A3800010A3B,
+ 0x10A3F00010A40,
+ 0x10A6000010A7D,
+ 0x10A8000010A9D,
+ 0x10AC000010AC8,
+ 0x10AC900010AE7,
+ 0x10B0000010B36,
+ 0x10B4000010B56,
+ 0x10B6000010B73,
+ 0x10B8000010B92,
+ 0x10C0000010C49,
+ 0x10CC000010CF3,
+ 0x10D0000010D28,
+ 0x10D3000010D3A,
+ 0x10D4000010D50,
+ 0x10D6900010D6E,
+ 0x10D6F00010D86,
+ 0x10E8000010EAA,
+ 0x10EAB00010EAD,
+ 0x10EB000010EB2,
+ 0x10EC200010EC5,
+ 0x10EFC00010F1D,
+ 0x10F2700010F28,
+ 0x10F3000010F51,
+ 0x10F7000010F86,
+ 0x10FB000010FC5,
+ 0x10FE000010FF7,
+ 0x1100000011047,
+ 0x1106600011076,
+ 0x1107F000110BB,
+ 0x110C2000110C3,
+ 0x110D0000110E9,
+ 0x110F0000110FA,
+ 0x1110000011135,
+ 0x1113600011140,
+ 0x1114400011148,
+ 0x1115000011174,
+ 0x1117600011177,
+ 0x11180000111C5,
+ 0x111C9000111CD,
+ 0x111CE000111DB,
+ 0x111DC000111DD,
+ 0x1120000011212,
+ 0x1121300011238,
+ 0x1123E00011242,
+ 0x1128000011287,
+ 0x1128800011289,
+ 0x1128A0001128E,
+ 0x1128F0001129E,
+ 0x1129F000112A9,
+ 0x112B0000112EB,
+ 0x112F0000112FA,
+ 0x1130000011304,
+ 0x113050001130D,
+ 0x1130F00011311,
+ 0x1131300011329,
+ 0x1132A00011331,
+ 0x1133200011334,
+ 0x113350001133A,
+ 0x1133B00011345,
+ 0x1134700011349,
+ 0x1134B0001134E,
+ 0x1135000011351,
+ 0x1135700011358,
+ 0x1135D00011364,
+ 0x113660001136D,
+ 0x1137000011375,
+ 0x113800001138A,
+ 0x1138B0001138C,
+ 0x1138E0001138F,
+ 0x11390000113B6,
+ 0x113B7000113C1,
+ 0x113C2000113C3,
+ 0x113C5000113C6,
+ 0x113C7000113CB,
+ 0x113CC000113D4,
+ 0x113E1000113E3,
+ 0x114000001144B,
+ 0x114500001145A,
+ 0x1145E00011462,
+ 0x11480000114C6,
+ 0x114C7000114C8,
+ 0x114D0000114DA,
+ 0x11580000115B6,
+ 0x115B8000115C1,
+ 0x115D8000115DE,
+ 0x1160000011641,
+ 0x1164400011645,
+ 0x116500001165A,
+ 0x11680000116B9,
+ 0x116C0000116CA,
+ 0x116D0000116E4,
+ 0x117000001171B,
+ 0x1171D0001172C,
+ 0x117300001173A,
+ 0x1174000011747,
+ 0x118000001183B,
+ 0x118C0000118EA,
+ 0x118FF00011907,
+ 0x119090001190A,
+ 0x1190C00011914,
+ 0x1191500011917,
+ 0x1191800011936,
+ 0x1193700011939,
+ 0x1193B00011944,
+ 0x119500001195A,
+ 0x119A0000119A8,
+ 0x119AA000119D8,
+ 0x119DA000119E2,
+ 0x119E3000119E5,
+ 0x11A0000011A3F,
+ 0x11A4700011A48,
+ 0x11A5000011A9A,
+ 0x11A9D00011A9E,
+ 0x11AB000011AF9,
+ 0x11BC000011BE1,
+ 0x11BF000011BFA,
+ 0x11C0000011C09,
+ 0x11C0A00011C37,
+ 0x11C3800011C41,
+ 0x11C5000011C5A,
+ 0x11C7200011C90,
+ 0x11C9200011CA8,
+ 0x11CA900011CB7,
+ 0x11D0000011D07,
+ 0x11D0800011D0A,
+ 0x11D0B00011D37,
+ 0x11D3A00011D3B,
+ 0x11D3C00011D3E,
+ 0x11D3F00011D48,
+ 0x11D5000011D5A,
+ 0x11D6000011D66,
+ 0x11D6700011D69,
+ 0x11D6A00011D8F,
+ 0x11D9000011D92,
+ 0x11D9300011D99,
+ 0x11DA000011DAA,
+ 0x11EE000011EF7,
+ 0x11F0000011F11,
+ 0x11F1200011F3B,
+ 0x11F3E00011F43,
+ 0x11F5000011F5B,
+ 0x11FB000011FB1,
+ 0x120000001239A,
+ 0x1248000012544,
+ 0x12F9000012FF1,
+ 0x1300000013430,
+ 0x1344000013456,
+ 0x13460000143FB,
+ 0x1440000014647,
+ 0x161000001613A,
+ 0x1680000016A39,
+ 0x16A4000016A5F,
+ 0x16A6000016A6A,
+ 0x16A7000016ABF,
+ 0x16AC000016ACA,
+ 0x16AD000016AEE,
+ 0x16AF000016AF5,
+ 0x16B0000016B37,
+ 0x16B4000016B44,
+ 0x16B5000016B5A,
+ 0x16B6300016B78,
+ 0x16B7D00016B90,
+ 0x16D4000016D6D,
+ 0x16D7000016D7A,
+ 0x16E6000016E80,
+ 0x16F0000016F4B,
+ 0x16F4F00016F88,
+ 0x16F8F00016FA0,
+ 0x16FE000016FE2,
+ 0x16FE300016FE5,
+ 0x16FF000016FF2,
+ 0x17000000187F8,
+ 0x1880000018CD6,
+ 0x18CFF00018D09,
+ 0x1AFF00001AFF4,
+ 0x1AFF50001AFFC,
+ 0x1AFFD0001AFFF,
+ 0x1B0000001B123,
+ 0x1B1320001B133,
+ 0x1B1500001B153,
+ 0x1B1550001B156,
+ 0x1B1640001B168,
+ 0x1B1700001B2FC,
+ 0x1BC000001BC6B,
+ 0x1BC700001BC7D,
+ 0x1BC800001BC89,
+ 0x1BC900001BC9A,
+ 0x1BC9D0001BC9F,
+ 0x1CCF00001CCFA,
+ 0x1CF000001CF2E,
+ 0x1CF300001CF47,
+ 0x1DA000001DA37,
+ 0x1DA3B0001DA6D,
+ 0x1DA750001DA76,
+ 0x1DA840001DA85,
+ 0x1DA9B0001DAA0,
+ 0x1DAA10001DAB0,
+ 0x1DF000001DF1F,
+ 0x1DF250001DF2B,
+ 0x1E0000001E007,
+ 0x1E0080001E019,
+ 0x1E01B0001E022,
+ 0x1E0230001E025,
+ 0x1E0260001E02B,
+ 0x1E08F0001E090,
+ 0x1E1000001E12D,
+ 0x1E1300001E13E,
+ 0x1E1400001E14A,
+ 0x1E14E0001E14F,
+ 0x1E2900001E2AF,
+ 0x1E2C00001E2FA,
+ 0x1E4D00001E4FA,
+ 0x1E5D00001E5FB,
+ 0x1E7E00001E7E7,
+ 0x1E7E80001E7EC,
+ 0x1E7ED0001E7EF,
+ 0x1E7F00001E7FF,
+ 0x1E8000001E8C5,
+ 0x1E8D00001E8D7,
+ 0x1E9220001E94C,
+ 0x1E9500001E95A,
+ 0x200000002A6E0,
+ 0x2A7000002B73A,
+ 0x2B7400002B81E,
+ 0x2B8200002CEA2,
+ 0x2CEB00002EBE1,
+ 0x2EBF00002EE5E,
+ 0x300000003134B,
+ 0x31350000323B0,
+ ),
+ "CONTEXTJ": (0x200C0000200E,),
+ "CONTEXTO": (
+ 0xB7000000B8,
+ 0x37500000376,
+ 0x5F3000005F5,
+ 0x6600000066A,
+ 0x6F0000006FA,
+ 0x30FB000030FC,
+ ),
+}
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/intranges.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/intranges.py"
new file mode 100644
index 0000000..7bfaa8d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/intranges.py"
@@ -0,0 +1,57 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+from typing import List, Tuple
+
+
+def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
+ """Represent a list of integers as a sequence of ranges:
+ ((start_0, end_0), (start_1, end_1), ...), such that the original
+ integers are exactly those x such that start_i <= x < end_i for some i.
+
+ Ranges are encoded as single integers (start << 32 | end), not as tuples.
+ """
+
+ sorted_list = sorted(list_)
+ ranges = []
+ last_write = -1
+ for i in range(len(sorted_list)):
+ if i + 1 < len(sorted_list):
+ if sorted_list[i] == sorted_list[i + 1] - 1:
+ continue
+ current_range = sorted_list[last_write + 1 : i + 1]
+ ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+ last_write = i
+
+ return tuple(ranges)
+
+
+def _encode_range(start: int, end: int) -> int:
+ return (start << 32) | end
+
+
+def _decode_range(r: int) -> Tuple[int, int]:
+ return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
+ """Determine if `int_` falls into one of the ranges in `ranges`."""
+ tuple_ = _encode_range(int_, 0)
+ pos = bisect.bisect_left(ranges, tuple_)
+ # we could be immediately ahead of a tuple (start, end)
+ # with start < int_ <= end
+ if pos > 0:
+ left, right = _decode_range(ranges[pos - 1])
+ if left <= int_ < right:
+ return True
+ # or we could be immediately behind a tuple (int_, end)
+ if pos < len(ranges):
+ left, _ = _decode_range(ranges[pos])
+ if left == int_:
+ return True
+ return False
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/package_data.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/package_data.py"
new file mode 100644
index 0000000..7272c8d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/package_data.py"
@@ -0,0 +1 @@
+__version__ = "3.11"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/py.typed"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/py.typed"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/uts46data.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/uts46data.py"
new file mode 100644
index 0000000..4610b71
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/idna/uts46data.py"
@@ -0,0 +1,8841 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+from typing import List, Tuple, Union
+
+"""IDNA Mapping Table from UTS46."""
+
+
+__version__ = "16.0.0"
+
+
+def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x0, "V"),
+ (0x1, "V"),
+ (0x2, "V"),
+ (0x3, "V"),
+ (0x4, "V"),
+ (0x5, "V"),
+ (0x6, "V"),
+ (0x7, "V"),
+ (0x8, "V"),
+ (0x9, "V"),
+ (0xA, "V"),
+ (0xB, "V"),
+ (0xC, "V"),
+ (0xD, "V"),
+ (0xE, "V"),
+ (0xF, "V"),
+ (0x10, "V"),
+ (0x11, "V"),
+ (0x12, "V"),
+ (0x13, "V"),
+ (0x14, "V"),
+ (0x15, "V"),
+ (0x16, "V"),
+ (0x17, "V"),
+ (0x18, "V"),
+ (0x19, "V"),
+ (0x1A, "V"),
+ (0x1B, "V"),
+ (0x1C, "V"),
+ (0x1D, "V"),
+ (0x1E, "V"),
+ (0x1F, "V"),
+ (0x20, "V"),
+ (0x21, "V"),
+ (0x22, "V"),
+ (0x23, "V"),
+ (0x24, "V"),
+ (0x25, "V"),
+ (0x26, "V"),
+ (0x27, "V"),
+ (0x28, "V"),
+ (0x29, "V"),
+ (0x2A, "V"),
+ (0x2B, "V"),
+ (0x2C, "V"),
+ (0x2D, "V"),
+ (0x2E, "V"),
+ (0x2F, "V"),
+ (0x30, "V"),
+ (0x31, "V"),
+ (0x32, "V"),
+ (0x33, "V"),
+ (0x34, "V"),
+ (0x35, "V"),
+ (0x36, "V"),
+ (0x37, "V"),
+ (0x38, "V"),
+ (0x39, "V"),
+ (0x3A, "V"),
+ (0x3B, "V"),
+ (0x3C, "V"),
+ (0x3D, "V"),
+ (0x3E, "V"),
+ (0x3F, "V"),
+ (0x40, "V"),
+ (0x41, "M", "a"),
+ (0x42, "M", "b"),
+ (0x43, "M", "c"),
+ (0x44, "M", "d"),
+ (0x45, "M", "e"),
+ (0x46, "M", "f"),
+ (0x47, "M", "g"),
+ (0x48, "M", "h"),
+ (0x49, "M", "i"),
+ (0x4A, "M", "j"),
+ (0x4B, "M", "k"),
+ (0x4C, "M", "l"),
+ (0x4D, "M", "m"),
+ (0x4E, "M", "n"),
+ (0x4F, "M", "o"),
+ (0x50, "M", "p"),
+ (0x51, "M", "q"),
+ (0x52, "M", "r"),
+ (0x53, "M", "s"),
+ (0x54, "M", "t"),
+ (0x55, "M", "u"),
+ (0x56, "M", "v"),
+ (0x57, "M", "w"),
+ (0x58, "M", "x"),
+ (0x59, "M", "y"),
+ (0x5A, "M", "z"),
+ (0x5B, "V"),
+ (0x5C, "V"),
+ (0x5D, "V"),
+ (0x5E, "V"),
+ (0x5F, "V"),
+ (0x60, "V"),
+ (0x61, "V"),
+ (0x62, "V"),
+ (0x63, "V"),
+ ]
+
+
+def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x64, "V"),
+ (0x65, "V"),
+ (0x66, "V"),
+ (0x67, "V"),
+ (0x68, "V"),
+ (0x69, "V"),
+ (0x6A, "V"),
+ (0x6B, "V"),
+ (0x6C, "V"),
+ (0x6D, "V"),
+ (0x6E, "V"),
+ (0x6F, "V"),
+ (0x70, "V"),
+ (0x71, "V"),
+ (0x72, "V"),
+ (0x73, "V"),
+ (0x74, "V"),
+ (0x75, "V"),
+ (0x76, "V"),
+ (0x77, "V"),
+ (0x78, "V"),
+ (0x79, "V"),
+ (0x7A, "V"),
+ (0x7B, "V"),
+ (0x7C, "V"),
+ (0x7D, "V"),
+ (0x7E, "V"),
+ (0x7F, "V"),
+ (0x80, "X"),
+ (0x81, "X"),
+ (0x82, "X"),
+ (0x83, "X"),
+ (0x84, "X"),
+ (0x85, "X"),
+ (0x86, "X"),
+ (0x87, "X"),
+ (0x88, "X"),
+ (0x89, "X"),
+ (0x8A, "X"),
+ (0x8B, "X"),
+ (0x8C, "X"),
+ (0x8D, "X"),
+ (0x8E, "X"),
+ (0x8F, "X"),
+ (0x90, "X"),
+ (0x91, "X"),
+ (0x92, "X"),
+ (0x93, "X"),
+ (0x94, "X"),
+ (0x95, "X"),
+ (0x96, "X"),
+ (0x97, "X"),
+ (0x98, "X"),
+ (0x99, "X"),
+ (0x9A, "X"),
+ (0x9B, "X"),
+ (0x9C, "X"),
+ (0x9D, "X"),
+ (0x9E, "X"),
+ (0x9F, "X"),
+ (0xA0, "M", " "),
+ (0xA1, "V"),
+ (0xA2, "V"),
+ (0xA3, "V"),
+ (0xA4, "V"),
+ (0xA5, "V"),
+ (0xA6, "V"),
+ (0xA7, "V"),
+ (0xA8, "M", " ̈"),
+ (0xA9, "V"),
+ (0xAA, "M", "a"),
+ (0xAB, "V"),
+ (0xAC, "V"),
+ (0xAD, "I"),
+ (0xAE, "V"),
+ (0xAF, "M", " ̄"),
+ (0xB0, "V"),
+ (0xB1, "V"),
+ (0xB2, "M", "2"),
+ (0xB3, "M", "3"),
+ (0xB4, "M", " ́"),
+ (0xB5, "M", "μ"),
+ (0xB6, "V"),
+ (0xB7, "V"),
+ (0xB8, "M", " ̧"),
+ (0xB9, "M", "1"),
+ (0xBA, "M", "o"),
+ (0xBB, "V"),
+ (0xBC, "M", "1⁄4"),
+ (0xBD, "M", "1⁄2"),
+ (0xBE, "M", "3⁄4"),
+ (0xBF, "V"),
+ (0xC0, "M", "à"),
+ (0xC1, "M", "á"),
+ (0xC2, "M", "â"),
+ (0xC3, "M", "ã"),
+ (0xC4, "M", "ä"),
+ (0xC5, "M", "å"),
+ (0xC6, "M", "æ"),
+ (0xC7, "M", "ç"),
+ ]
+
+
+def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xC8, "M", "è"),
+ (0xC9, "M", "é"),
+ (0xCA, "M", "ê"),
+ (0xCB, "M", "ë"),
+ (0xCC, "M", "ì"),
+ (0xCD, "M", "í"),
+ (0xCE, "M", "î"),
+ (0xCF, "M", "ï"),
+ (0xD0, "M", "ð"),
+ (0xD1, "M", "ñ"),
+ (0xD2, "M", "ò"),
+ (0xD3, "M", "ó"),
+ (0xD4, "M", "ô"),
+ (0xD5, "M", "õ"),
+ (0xD6, "M", "ö"),
+ (0xD7, "V"),
+ (0xD8, "M", "ø"),
+ (0xD9, "M", "ù"),
+ (0xDA, "M", "ú"),
+ (0xDB, "M", "û"),
+ (0xDC, "M", "ü"),
+ (0xDD, "M", "ý"),
+ (0xDE, "M", "þ"),
+ (0xDF, "D", "ss"),
+ (0xE0, "V"),
+ (0xE1, "V"),
+ (0xE2, "V"),
+ (0xE3, "V"),
+ (0xE4, "V"),
+ (0xE5, "V"),
+ (0xE6, "V"),
+ (0xE7, "V"),
+ (0xE8, "V"),
+ (0xE9, "V"),
+ (0xEA, "V"),
+ (0xEB, "V"),
+ (0xEC, "V"),
+ (0xED, "V"),
+ (0xEE, "V"),
+ (0xEF, "V"),
+ (0xF0, "V"),
+ (0xF1, "V"),
+ (0xF2, "V"),
+ (0xF3, "V"),
+ (0xF4, "V"),
+ (0xF5, "V"),
+ (0xF6, "V"),
+ (0xF7, "V"),
+ (0xF8, "V"),
+ (0xF9, "V"),
+ (0xFA, "V"),
+ (0xFB, "V"),
+ (0xFC, "V"),
+ (0xFD, "V"),
+ (0xFE, "V"),
+ (0xFF, "V"),
+ (0x100, "M", "ā"),
+ (0x101, "V"),
+ (0x102, "M", "ă"),
+ (0x103, "V"),
+ (0x104, "M", "ą"),
+ (0x105, "V"),
+ (0x106, "M", "ć"),
+ (0x107, "V"),
+ (0x108, "M", "ĉ"),
+ (0x109, "V"),
+ (0x10A, "M", "ċ"),
+ (0x10B, "V"),
+ (0x10C, "M", "č"),
+ (0x10D, "V"),
+ (0x10E, "M", "ď"),
+ (0x10F, "V"),
+ (0x110, "M", "đ"),
+ (0x111, "V"),
+ (0x112, "M", "ē"),
+ (0x113, "V"),
+ (0x114, "M", "ĕ"),
+ (0x115, "V"),
+ (0x116, "M", "ė"),
+ (0x117, "V"),
+ (0x118, "M", "ę"),
+ (0x119, "V"),
+ (0x11A, "M", "ě"),
+ (0x11B, "V"),
+ (0x11C, "M", "ĝ"),
+ (0x11D, "V"),
+ (0x11E, "M", "ğ"),
+ (0x11F, "V"),
+ (0x120, "M", "ġ"),
+ (0x121, "V"),
+ (0x122, "M", "ģ"),
+ (0x123, "V"),
+ (0x124, "M", "ĥ"),
+ (0x125, "V"),
+ (0x126, "M", "ħ"),
+ (0x127, "V"),
+ (0x128, "M", "ĩ"),
+ (0x129, "V"),
+ (0x12A, "M", "ī"),
+ (0x12B, "V"),
+ ]
+
+
+def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x12C, "M", "ĭ"),
+ (0x12D, "V"),
+ (0x12E, "M", "į"),
+ (0x12F, "V"),
+ (0x130, "M", "i̇"),
+ (0x131, "V"),
+ (0x132, "M", "ij"),
+ (0x134, "M", "ĵ"),
+ (0x135, "V"),
+ (0x136, "M", "ķ"),
+ (0x137, "V"),
+ (0x139, "M", "ĺ"),
+ (0x13A, "V"),
+ (0x13B, "M", "ļ"),
+ (0x13C, "V"),
+ (0x13D, "M", "ľ"),
+ (0x13E, "V"),
+ (0x13F, "M", "l·"),
+ (0x141, "M", "ł"),
+ (0x142, "V"),
+ (0x143, "M", "ń"),
+ (0x144, "V"),
+ (0x145, "M", "ņ"),
+ (0x146, "V"),
+ (0x147, "M", "ň"),
+ (0x148, "V"),
+ (0x149, "M", "ʼn"),
+ (0x14A, "M", "ŋ"),
+ (0x14B, "V"),
+ (0x14C, "M", "ō"),
+ (0x14D, "V"),
+ (0x14E, "M", "ŏ"),
+ (0x14F, "V"),
+ (0x150, "M", "ő"),
+ (0x151, "V"),
+ (0x152, "M", "œ"),
+ (0x153, "V"),
+ (0x154, "M", "ŕ"),
+ (0x155, "V"),
+ (0x156, "M", "ŗ"),
+ (0x157, "V"),
+ (0x158, "M", "ř"),
+ (0x159, "V"),
+ (0x15A, "M", "ś"),
+ (0x15B, "V"),
+ (0x15C, "M", "ŝ"),
+ (0x15D, "V"),
+ (0x15E, "M", "ş"),
+ (0x15F, "V"),
+ (0x160, "M", "š"),
+ (0x161, "V"),
+ (0x162, "M", "ţ"),
+ (0x163, "V"),
+ (0x164, "M", "ť"),
+ (0x165, "V"),
+ (0x166, "M", "ŧ"),
+ (0x167, "V"),
+ (0x168, "M", "ũ"),
+ (0x169, "V"),
+ (0x16A, "M", "ū"),
+ (0x16B, "V"),
+ (0x16C, "M", "ŭ"),
+ (0x16D, "V"),
+ (0x16E, "M", "ů"),
+ (0x16F, "V"),
+ (0x170, "M", "ű"),
+ (0x171, "V"),
+ (0x172, "M", "ų"),
+ (0x173, "V"),
+ (0x174, "M", "ŵ"),
+ (0x175, "V"),
+ (0x176, "M", "ŷ"),
+ (0x177, "V"),
+ (0x178, "M", "ÿ"),
+ (0x179, "M", "ź"),
+ (0x17A, "V"),
+ (0x17B, "M", "ż"),
+ (0x17C, "V"),
+ (0x17D, "M", "ž"),
+ (0x17E, "V"),
+ (0x17F, "M", "s"),
+ (0x180, "V"),
+ (0x181, "M", "ɓ"),
+ (0x182, "M", "ƃ"),
+ (0x183, "V"),
+ (0x184, "M", "ƅ"),
+ (0x185, "V"),
+ (0x186, "M", "ɔ"),
+ (0x187, "M", "ƈ"),
+ (0x188, "V"),
+ (0x189, "M", "ɖ"),
+ (0x18A, "M", "ɗ"),
+ (0x18B, "M", "ƌ"),
+ (0x18C, "V"),
+ (0x18E, "M", "ǝ"),
+ (0x18F, "M", "ə"),
+ (0x190, "M", "ɛ"),
+ (0x191, "M", "ƒ"),
+ (0x192, "V"),
+ (0x193, "M", "ɠ"),
+ ]
+
+
+def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x194, "M", "ɣ"),
+ (0x195, "V"),
+ (0x196, "M", "ɩ"),
+ (0x197, "M", "ɨ"),
+ (0x198, "M", "ƙ"),
+ (0x199, "V"),
+ (0x19C, "M", "ɯ"),
+ (0x19D, "M", "ɲ"),
+ (0x19E, "V"),
+ (0x19F, "M", "ɵ"),
+ (0x1A0, "M", "ơ"),
+ (0x1A1, "V"),
+ (0x1A2, "M", "ƣ"),
+ (0x1A3, "V"),
+ (0x1A4, "M", "ƥ"),
+ (0x1A5, "V"),
+ (0x1A6, "M", "ʀ"),
+ (0x1A7, "M", "ƨ"),
+ (0x1A8, "V"),
+ (0x1A9, "M", "ʃ"),
+ (0x1AA, "V"),
+ (0x1AC, "M", "ƭ"),
+ (0x1AD, "V"),
+ (0x1AE, "M", "ʈ"),
+ (0x1AF, "M", "ư"),
+ (0x1B0, "V"),
+ (0x1B1, "M", "ʊ"),
+ (0x1B2, "M", "ʋ"),
+ (0x1B3, "M", "ƴ"),
+ (0x1B4, "V"),
+ (0x1B5, "M", "ƶ"),
+ (0x1B6, "V"),
+ (0x1B7, "M", "ʒ"),
+ (0x1B8, "M", "ƹ"),
+ (0x1B9, "V"),
+ (0x1BC, "M", "ƽ"),
+ (0x1BD, "V"),
+ (0x1C4, "M", "dž"),
+ (0x1C7, "M", "lj"),
+ (0x1CA, "M", "nj"),
+ (0x1CD, "M", "ǎ"),
+ (0x1CE, "V"),
+ (0x1CF, "M", "ǐ"),
+ (0x1D0, "V"),
+ (0x1D1, "M", "ǒ"),
+ (0x1D2, "V"),
+ (0x1D3, "M", "ǔ"),
+ (0x1D4, "V"),
+ (0x1D5, "M", "ǖ"),
+ (0x1D6, "V"),
+ (0x1D7, "M", "ǘ"),
+ (0x1D8, "V"),
+ (0x1D9, "M", "ǚ"),
+ (0x1DA, "V"),
+ (0x1DB, "M", "ǜ"),
+ (0x1DC, "V"),
+ (0x1DE, "M", "ǟ"),
+ (0x1DF, "V"),
+ (0x1E0, "M", "ǡ"),
+ (0x1E1, "V"),
+ (0x1E2, "M", "ǣ"),
+ (0x1E3, "V"),
+ (0x1E4, "M", "ǥ"),
+ (0x1E5, "V"),
+ (0x1E6, "M", "ǧ"),
+ (0x1E7, "V"),
+ (0x1E8, "M", "ǩ"),
+ (0x1E9, "V"),
+ (0x1EA, "M", "ǫ"),
+ (0x1EB, "V"),
+ (0x1EC, "M", "ǭ"),
+ (0x1ED, "V"),
+ (0x1EE, "M", "ǯ"),
+ (0x1EF, "V"),
+ (0x1F1, "M", "dz"),
+ (0x1F4, "M", "ǵ"),
+ (0x1F5, "V"),
+ (0x1F6, "M", "ƕ"),
+ (0x1F7, "M", "ƿ"),
+ (0x1F8, "M", "ǹ"),
+ (0x1F9, "V"),
+ (0x1FA, "M", "ǻ"),
+ (0x1FB, "V"),
+ (0x1FC, "M", "ǽ"),
+ (0x1FD, "V"),
+ (0x1FE, "M", "ǿ"),
+ (0x1FF, "V"),
+ (0x200, "M", "ȁ"),
+ (0x201, "V"),
+ (0x202, "M", "ȃ"),
+ (0x203, "V"),
+ (0x204, "M", "ȅ"),
+ (0x205, "V"),
+ (0x206, "M", "ȇ"),
+ (0x207, "V"),
+ (0x208, "M", "ȉ"),
+ (0x209, "V"),
+ (0x20A, "M", "ȋ"),
+ (0x20B, "V"),
+ (0x20C, "M", "ȍ"),
+ ]
+
+
+def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x20D, "V"),
+ (0x20E, "M", "ȏ"),
+ (0x20F, "V"),
+ (0x210, "M", "ȑ"),
+ (0x211, "V"),
+ (0x212, "M", "ȓ"),
+ (0x213, "V"),
+ (0x214, "M", "ȕ"),
+ (0x215, "V"),
+ (0x216, "M", "ȗ"),
+ (0x217, "V"),
+ (0x218, "M", "ș"),
+ (0x219, "V"),
+ (0x21A, "M", "ț"),
+ (0x21B, "V"),
+ (0x21C, "M", "ȝ"),
+ (0x21D, "V"),
+ (0x21E, "M", "ȟ"),
+ (0x21F, "V"),
+ (0x220, "M", "ƞ"),
+ (0x221, "V"),
+ (0x222, "M", "ȣ"),
+ (0x223, "V"),
+ (0x224, "M", "ȥ"),
+ (0x225, "V"),
+ (0x226, "M", "ȧ"),
+ (0x227, "V"),
+ (0x228, "M", "ȩ"),
+ (0x229, "V"),
+ (0x22A, "M", "ȫ"),
+ (0x22B, "V"),
+ (0x22C, "M", "ȭ"),
+ (0x22D, "V"),
+ (0x22E, "M", "ȯ"),
+ (0x22F, "V"),
+ (0x230, "M", "ȱ"),
+ (0x231, "V"),
+ (0x232, "M", "ȳ"),
+ (0x233, "V"),
+ (0x23A, "M", "ⱥ"),
+ (0x23B, "M", "ȼ"),
+ (0x23C, "V"),
+ (0x23D, "M", "ƚ"),
+ (0x23E, "M", "ⱦ"),
+ (0x23F, "V"),
+ (0x241, "M", "ɂ"),
+ (0x242, "V"),
+ (0x243, "M", "ƀ"),
+ (0x244, "M", "ʉ"),
+ (0x245, "M", "ʌ"),
+ (0x246, "M", "ɇ"),
+ (0x247, "V"),
+ (0x248, "M", "ɉ"),
+ (0x249, "V"),
+ (0x24A, "M", "ɋ"),
+ (0x24B, "V"),
+ (0x24C, "M", "ɍ"),
+ (0x24D, "V"),
+ (0x24E, "M", "ɏ"),
+ (0x24F, "V"),
+ (0x2B0, "M", "h"),
+ (0x2B1, "M", "ɦ"),
+ (0x2B2, "M", "j"),
+ (0x2B3, "M", "r"),
+ (0x2B4, "M", "ɹ"),
+ (0x2B5, "M", "ɻ"),
+ (0x2B6, "M", "ʁ"),
+ (0x2B7, "M", "w"),
+ (0x2B8, "M", "y"),
+ (0x2B9, "V"),
+ (0x2D8, "M", " ̆"),
+ (0x2D9, "M", " ̇"),
+ (0x2DA, "M", " ̊"),
+ (0x2DB, "M", " ̨"),
+ (0x2DC, "M", " ̃"),
+ (0x2DD, "M", " ̋"),
+ (0x2DE, "V"),
+ (0x2E0, "M", "ɣ"),
+ (0x2E1, "M", "l"),
+ (0x2E2, "M", "s"),
+ (0x2E3, "M", "x"),
+ (0x2E4, "M", "ʕ"),
+ (0x2E5, "V"),
+ (0x340, "M", "̀"),
+ (0x341, "M", "́"),
+ (0x342, "V"),
+ (0x343, "M", "̓"),
+ (0x344, "M", "̈́"),
+ (0x345, "M", "ι"),
+ (0x346, "V"),
+ (0x34F, "I"),
+ (0x350, "V"),
+ (0x370, "M", "ͱ"),
+ (0x371, "V"),
+ (0x372, "M", "ͳ"),
+ (0x373, "V"),
+ (0x374, "M", "ʹ"),
+ (0x375, "V"),
+ (0x376, "M", "ͷ"),
+ (0x377, "V"),
+ ]
+
+
+def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x378, "X"),
+ (0x37A, "M", " ι"),
+ (0x37B, "V"),
+ (0x37E, "M", ";"),
+ (0x37F, "M", "ϳ"),
+ (0x380, "X"),
+ (0x384, "M", " ́"),
+ (0x385, "M", " ̈́"),
+ (0x386, "M", "ά"),
+ (0x387, "M", "·"),
+ (0x388, "M", "έ"),
+ (0x389, "M", "ή"),
+ (0x38A, "M", "ί"),
+ (0x38B, "X"),
+ (0x38C, "M", "ό"),
+ (0x38D, "X"),
+ (0x38E, "M", "ύ"),
+ (0x38F, "M", "ώ"),
+ (0x390, "V"),
+ (0x391, "M", "α"),
+ (0x392, "M", "β"),
+ (0x393, "M", "γ"),
+ (0x394, "M", "δ"),
+ (0x395, "M", "ε"),
+ (0x396, "M", "ζ"),
+ (0x397, "M", "η"),
+ (0x398, "M", "θ"),
+ (0x399, "M", "ι"),
+ (0x39A, "M", "κ"),
+ (0x39B, "M", "λ"),
+ (0x39C, "M", "μ"),
+ (0x39D, "M", "ν"),
+ (0x39E, "M", "ξ"),
+ (0x39F, "M", "ο"),
+ (0x3A0, "M", "π"),
+ (0x3A1, "M", "ρ"),
+ (0x3A2, "X"),
+ (0x3A3, "M", "σ"),
+ (0x3A4, "M", "τ"),
+ (0x3A5, "M", "υ"),
+ (0x3A6, "M", "φ"),
+ (0x3A7, "M", "χ"),
+ (0x3A8, "M", "ψ"),
+ (0x3A9, "M", "ω"),
+ (0x3AA, "M", "ϊ"),
+ (0x3AB, "M", "ϋ"),
+ (0x3AC, "V"),
+ (0x3C2, "D", "σ"),
+ (0x3C3, "V"),
+ (0x3CF, "M", "ϗ"),
+ (0x3D0, "M", "β"),
+ (0x3D1, "M", "θ"),
+ (0x3D2, "M", "υ"),
+ (0x3D3, "M", "ύ"),
+ (0x3D4, "M", "ϋ"),
+ (0x3D5, "M", "φ"),
+ (0x3D6, "M", "π"),
+ (0x3D7, "V"),
+ (0x3D8, "M", "ϙ"),
+ (0x3D9, "V"),
+ (0x3DA, "M", "ϛ"),
+ (0x3DB, "V"),
+ (0x3DC, "M", "ϝ"),
+ (0x3DD, "V"),
+ (0x3DE, "M", "ϟ"),
+ (0x3DF, "V"),
+ (0x3E0, "M", "ϡ"),
+ (0x3E1, "V"),
+ (0x3E2, "M", "ϣ"),
+ (0x3E3, "V"),
+ (0x3E4, "M", "ϥ"),
+ (0x3E5, "V"),
+ (0x3E6, "M", "ϧ"),
+ (0x3E7, "V"),
+ (0x3E8, "M", "ϩ"),
+ (0x3E9, "V"),
+ (0x3EA, "M", "ϫ"),
+ (0x3EB, "V"),
+ (0x3EC, "M", "ϭ"),
+ (0x3ED, "V"),
+ (0x3EE, "M", "ϯ"),
+ (0x3EF, "V"),
+ (0x3F0, "M", "κ"),
+ (0x3F1, "M", "ρ"),
+ (0x3F2, "M", "σ"),
+ (0x3F3, "V"),
+ (0x3F4, "M", "θ"),
+ (0x3F5, "M", "ε"),
+ (0x3F6, "V"),
+ (0x3F7, "M", "ϸ"),
+ (0x3F8, "V"),
+ (0x3F9, "M", "σ"),
+ (0x3FA, "M", "ϻ"),
+ (0x3FB, "V"),
+ (0x3FD, "M", "ͻ"),
+ (0x3FE, "M", "ͼ"),
+ (0x3FF, "M", "ͽ"),
+ (0x400, "M", "ѐ"),
+ (0x401, "M", "ё"),
+ (0x402, "M", "ђ"),
+ ]
+
+
+def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x403, "M", "ѓ"),
+ (0x404, "M", "є"),
+ (0x405, "M", "ѕ"),
+ (0x406, "M", "і"),
+ (0x407, "M", "ї"),
+ (0x408, "M", "ј"),
+ (0x409, "M", "љ"),
+ (0x40A, "M", "њ"),
+ (0x40B, "M", "ћ"),
+ (0x40C, "M", "ќ"),
+ (0x40D, "M", "ѝ"),
+ (0x40E, "M", "ў"),
+ (0x40F, "M", "џ"),
+ (0x410, "M", "а"),
+ (0x411, "M", "б"),
+ (0x412, "M", "в"),
+ (0x413, "M", "г"),
+ (0x414, "M", "д"),
+ (0x415, "M", "е"),
+ (0x416, "M", "ж"),
+ (0x417, "M", "з"),
+ (0x418, "M", "и"),
+ (0x419, "M", "й"),
+ (0x41A, "M", "к"),
+ (0x41B, "M", "л"),
+ (0x41C, "M", "м"),
+ (0x41D, "M", "н"),
+ (0x41E, "M", "о"),
+ (0x41F, "M", "п"),
+ (0x420, "M", "р"),
+ (0x421, "M", "с"),
+ (0x422, "M", "т"),
+ (0x423, "M", "у"),
+ (0x424, "M", "ф"),
+ (0x425, "M", "х"),
+ (0x426, "M", "ц"),
+ (0x427, "M", "ч"),
+ (0x428, "M", "ш"),
+ (0x429, "M", "щ"),
+ (0x42A, "M", "ъ"),
+ (0x42B, "M", "ы"),
+ (0x42C, "M", "ь"),
+ (0x42D, "M", "э"),
+ (0x42E, "M", "ю"),
+ (0x42F, "M", "я"),
+ (0x430, "V"),
+ (0x460, "M", "ѡ"),
+ (0x461, "V"),
+ (0x462, "M", "ѣ"),
+ (0x463, "V"),
+ (0x464, "M", "ѥ"),
+ (0x465, "V"),
+ (0x466, "M", "ѧ"),
+ (0x467, "V"),
+ (0x468, "M", "ѩ"),
+ (0x469, "V"),
+ (0x46A, "M", "ѫ"),
+ (0x46B, "V"),
+ (0x46C, "M", "ѭ"),
+ (0x46D, "V"),
+ (0x46E, "M", "ѯ"),
+ (0x46F, "V"),
+ (0x470, "M", "ѱ"),
+ (0x471, "V"),
+ (0x472, "M", "ѳ"),
+ (0x473, "V"),
+ (0x474, "M", "ѵ"),
+ (0x475, "V"),
+ (0x476, "M", "ѷ"),
+ (0x477, "V"),
+ (0x478, "M", "ѹ"),
+ (0x479, "V"),
+ (0x47A, "M", "ѻ"),
+ (0x47B, "V"),
+ (0x47C, "M", "ѽ"),
+ (0x47D, "V"),
+ (0x47E, "M", "ѿ"),
+ (0x47F, "V"),
+ (0x480, "M", "ҁ"),
+ (0x481, "V"),
+ (0x48A, "M", "ҋ"),
+ (0x48B, "V"),
+ (0x48C, "M", "ҍ"),
+ (0x48D, "V"),
+ (0x48E, "M", "ҏ"),
+ (0x48F, "V"),
+ (0x490, "M", "ґ"),
+ (0x491, "V"),
+ (0x492, "M", "ғ"),
+ (0x493, "V"),
+ (0x494, "M", "ҕ"),
+ (0x495, "V"),
+ (0x496, "M", "җ"),
+ (0x497, "V"),
+ (0x498, "M", "ҙ"),
+ (0x499, "V"),
+ (0x49A, "M", "қ"),
+ (0x49B, "V"),
+ (0x49C, "M", "ҝ"),
+ (0x49D, "V"),
+ ]
+
+
+def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x49E, "M", "ҟ"),
+ (0x49F, "V"),
+ (0x4A0, "M", "ҡ"),
+ (0x4A1, "V"),
+ (0x4A2, "M", "ң"),
+ (0x4A3, "V"),
+ (0x4A4, "M", "ҥ"),
+ (0x4A5, "V"),
+ (0x4A6, "M", "ҧ"),
+ (0x4A7, "V"),
+ (0x4A8, "M", "ҩ"),
+ (0x4A9, "V"),
+ (0x4AA, "M", "ҫ"),
+ (0x4AB, "V"),
+ (0x4AC, "M", "ҭ"),
+ (0x4AD, "V"),
+ (0x4AE, "M", "ү"),
+ (0x4AF, "V"),
+ (0x4B0, "M", "ұ"),
+ (0x4B1, "V"),
+ (0x4B2, "M", "ҳ"),
+ (0x4B3, "V"),
+ (0x4B4, "M", "ҵ"),
+ (0x4B5, "V"),
+ (0x4B6, "M", "ҷ"),
+ (0x4B7, "V"),
+ (0x4B8, "M", "ҹ"),
+ (0x4B9, "V"),
+ (0x4BA, "M", "һ"),
+ (0x4BB, "V"),
+ (0x4BC, "M", "ҽ"),
+ (0x4BD, "V"),
+ (0x4BE, "M", "ҿ"),
+ (0x4BF, "V"),
+ (0x4C0, "M", "ӏ"),
+ (0x4C1, "M", "ӂ"),
+ (0x4C2, "V"),
+ (0x4C3, "M", "ӄ"),
+ (0x4C4, "V"),
+ (0x4C5, "M", "ӆ"),
+ (0x4C6, "V"),
+ (0x4C7, "M", "ӈ"),
+ (0x4C8, "V"),
+ (0x4C9, "M", "ӊ"),
+ (0x4CA, "V"),
+ (0x4CB, "M", "ӌ"),
+ (0x4CC, "V"),
+ (0x4CD, "M", "ӎ"),
+ (0x4CE, "V"),
+ (0x4D0, "M", "ӑ"),
+ (0x4D1, "V"),
+ (0x4D2, "M", "ӓ"),
+ (0x4D3, "V"),
+ (0x4D4, "M", "ӕ"),
+ (0x4D5, "V"),
+ (0x4D6, "M", "ӗ"),
+ (0x4D7, "V"),
+ (0x4D8, "M", "ә"),
+ (0x4D9, "V"),
+ (0x4DA, "M", "ӛ"),
+ (0x4DB, "V"),
+ (0x4DC, "M", "ӝ"),
+ (0x4DD, "V"),
+ (0x4DE, "M", "ӟ"),
+ (0x4DF, "V"),
+ (0x4E0, "M", "ӡ"),
+ (0x4E1, "V"),
+ (0x4E2, "M", "ӣ"),
+ (0x4E3, "V"),
+ (0x4E4, "M", "ӥ"),
+ (0x4E5, "V"),
+ (0x4E6, "M", "ӧ"),
+ (0x4E7, "V"),
+ (0x4E8, "M", "ө"),
+ (0x4E9, "V"),
+ (0x4EA, "M", "ӫ"),
+ (0x4EB, "V"),
+ (0x4EC, "M", "ӭ"),
+ (0x4ED, "V"),
+ (0x4EE, "M", "ӯ"),
+ (0x4EF, "V"),
+ (0x4F0, "M", "ӱ"),
+ (0x4F1, "V"),
+ (0x4F2, "M", "ӳ"),
+ (0x4F3, "V"),
+ (0x4F4, "M", "ӵ"),
+ (0x4F5, "V"),
+ (0x4F6, "M", "ӷ"),
+ (0x4F7, "V"),
+ (0x4F8, "M", "ӹ"),
+ (0x4F9, "V"),
+ (0x4FA, "M", "ӻ"),
+ (0x4FB, "V"),
+ (0x4FC, "M", "ӽ"),
+ (0x4FD, "V"),
+ (0x4FE, "M", "ӿ"),
+ (0x4FF, "V"),
+ (0x500, "M", "ԁ"),
+ (0x501, "V"),
+ (0x502, "M", "ԃ"),
+ ]
+
+
+def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x503, "V"),
+ (0x504, "M", "ԅ"),
+ (0x505, "V"),
+ (0x506, "M", "ԇ"),
+ (0x507, "V"),
+ (0x508, "M", "ԉ"),
+ (0x509, "V"),
+ (0x50A, "M", "ԋ"),
+ (0x50B, "V"),
+ (0x50C, "M", "ԍ"),
+ (0x50D, "V"),
+ (0x50E, "M", "ԏ"),
+ (0x50F, "V"),
+ (0x510, "M", "ԑ"),
+ (0x511, "V"),
+ (0x512, "M", "ԓ"),
+ (0x513, "V"),
+ (0x514, "M", "ԕ"),
+ (0x515, "V"),
+ (0x516, "M", "ԗ"),
+ (0x517, "V"),
+ (0x518, "M", "ԙ"),
+ (0x519, "V"),
+ (0x51A, "M", "ԛ"),
+ (0x51B, "V"),
+ (0x51C, "M", "ԝ"),
+ (0x51D, "V"),
+ (0x51E, "M", "ԟ"),
+ (0x51F, "V"),
+ (0x520, "M", "ԡ"),
+ (0x521, "V"),
+ (0x522, "M", "ԣ"),
+ (0x523, "V"),
+ (0x524, "M", "ԥ"),
+ (0x525, "V"),
+ (0x526, "M", "ԧ"),
+ (0x527, "V"),
+ (0x528, "M", "ԩ"),
+ (0x529, "V"),
+ (0x52A, "M", "ԫ"),
+ (0x52B, "V"),
+ (0x52C, "M", "ԭ"),
+ (0x52D, "V"),
+ (0x52E, "M", "ԯ"),
+ (0x52F, "V"),
+ (0x530, "X"),
+ (0x531, "M", "ա"),
+ (0x532, "M", "բ"),
+ (0x533, "M", "գ"),
+ (0x534, "M", "դ"),
+ (0x535, "M", "ե"),
+ (0x536, "M", "զ"),
+ (0x537, "M", "է"),
+ (0x538, "M", "ը"),
+ (0x539, "M", "թ"),
+ (0x53A, "M", "ժ"),
+ (0x53B, "M", "ի"),
+ (0x53C, "M", "լ"),
+ (0x53D, "M", "խ"),
+ (0x53E, "M", "ծ"),
+ (0x53F, "M", "կ"),
+ (0x540, "M", "հ"),
+ (0x541, "M", "ձ"),
+ (0x542, "M", "ղ"),
+ (0x543, "M", "ճ"),
+ (0x544, "M", "մ"),
+ (0x545, "M", "յ"),
+ (0x546, "M", "ն"),
+ (0x547, "M", "շ"),
+ (0x548, "M", "ո"),
+ (0x549, "M", "չ"),
+ (0x54A, "M", "պ"),
+ (0x54B, "M", "ջ"),
+ (0x54C, "M", "ռ"),
+ (0x54D, "M", "ս"),
+ (0x54E, "M", "վ"),
+ (0x54F, "M", "տ"),
+ (0x550, "M", "ր"),
+ (0x551, "M", "ց"),
+ (0x552, "M", "ւ"),
+ (0x553, "M", "փ"),
+ (0x554, "M", "ք"),
+ (0x555, "M", "օ"),
+ (0x556, "M", "ֆ"),
+ (0x557, "X"),
+ (0x559, "V"),
+ (0x587, "M", "եւ"),
+ (0x588, "V"),
+ (0x58B, "X"),
+ (0x58D, "V"),
+ (0x590, "X"),
+ (0x591, "V"),
+ (0x5C8, "X"),
+ (0x5D0, "V"),
+ (0x5EB, "X"),
+ (0x5EF, "V"),
+ (0x5F5, "X"),
+ (0x606, "V"),
+ (0x61C, "X"),
+ (0x61D, "V"),
+ ]
+
+
+def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x675, "M", "اٴ"),
+ (0x676, "M", "وٴ"),
+ (0x677, "M", "ۇٴ"),
+ (0x678, "M", "يٴ"),
+ (0x679, "V"),
+ (0x6DD, "X"),
+ (0x6DE, "V"),
+ (0x70E, "X"),
+ (0x710, "V"),
+ (0x74B, "X"),
+ (0x74D, "V"),
+ (0x7B2, "X"),
+ (0x7C0, "V"),
+ (0x7FB, "X"),
+ (0x7FD, "V"),
+ (0x82E, "X"),
+ (0x830, "V"),
+ (0x83F, "X"),
+ (0x840, "V"),
+ (0x85C, "X"),
+ (0x85E, "V"),
+ (0x85F, "X"),
+ (0x860, "V"),
+ (0x86B, "X"),
+ (0x870, "V"),
+ (0x88F, "X"),
+ (0x897, "V"),
+ (0x8E2, "X"),
+ (0x8E3, "V"),
+ (0x958, "M", "क़"),
+ (0x959, "M", "ख़"),
+ (0x95A, "M", "ग़"),
+ (0x95B, "M", "ज़"),
+ (0x95C, "M", "ड़"),
+ (0x95D, "M", "ढ़"),
+ (0x95E, "M", "फ़"),
+ (0x95F, "M", "य़"),
+ (0x960, "V"),
+ (0x984, "X"),
+ (0x985, "V"),
+ (0x98D, "X"),
+ (0x98F, "V"),
+ (0x991, "X"),
+ (0x993, "V"),
+ (0x9A9, "X"),
+ (0x9AA, "V"),
+ (0x9B1, "X"),
+ (0x9B2, "V"),
+ (0x9B3, "X"),
+ (0x9B6, "V"),
+ (0x9BA, "X"),
+ (0x9BC, "V"),
+ (0x9C5, "X"),
+ (0x9C7, "V"),
+ (0x9C9, "X"),
+ (0x9CB, "V"),
+ (0x9CF, "X"),
+ (0x9D7, "V"),
+ (0x9D8, "X"),
+ (0x9DC, "M", "ড়"),
+ (0x9DD, "M", "ঢ়"),
+ (0x9DE, "X"),
+ (0x9DF, "M", "য়"),
+ (0x9E0, "V"),
+ (0x9E4, "X"),
+ (0x9E6, "V"),
+ (0x9FF, "X"),
+ (0xA01, "V"),
+ (0xA04, "X"),
+ (0xA05, "V"),
+ (0xA0B, "X"),
+ (0xA0F, "V"),
+ (0xA11, "X"),
+ (0xA13, "V"),
+ (0xA29, "X"),
+ (0xA2A, "V"),
+ (0xA31, "X"),
+ (0xA32, "V"),
+ (0xA33, "M", "ਲ਼"),
+ (0xA34, "X"),
+ (0xA35, "V"),
+ (0xA36, "M", "ਸ਼"),
+ (0xA37, "X"),
+ (0xA38, "V"),
+ (0xA3A, "X"),
+ (0xA3C, "V"),
+ (0xA3D, "X"),
+ (0xA3E, "V"),
+ (0xA43, "X"),
+ (0xA47, "V"),
+ (0xA49, "X"),
+ (0xA4B, "V"),
+ (0xA4E, "X"),
+ (0xA51, "V"),
+ (0xA52, "X"),
+ (0xA59, "M", "ਖ਼"),
+ (0xA5A, "M", "ਗ਼"),
+ (0xA5B, "M", "ਜ਼"),
+ (0xA5C, "V"),
+ (0xA5D, "X"),
+ ]
+
+
+def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA5E, "M", "ਫ਼"),
+ (0xA5F, "X"),
+ (0xA66, "V"),
+ (0xA77, "X"),
+ (0xA81, "V"),
+ (0xA84, "X"),
+ (0xA85, "V"),
+ (0xA8E, "X"),
+ (0xA8F, "V"),
+ (0xA92, "X"),
+ (0xA93, "V"),
+ (0xAA9, "X"),
+ (0xAAA, "V"),
+ (0xAB1, "X"),
+ (0xAB2, "V"),
+ (0xAB4, "X"),
+ (0xAB5, "V"),
+ (0xABA, "X"),
+ (0xABC, "V"),
+ (0xAC6, "X"),
+ (0xAC7, "V"),
+ (0xACA, "X"),
+ (0xACB, "V"),
+ (0xACE, "X"),
+ (0xAD0, "V"),
+ (0xAD1, "X"),
+ (0xAE0, "V"),
+ (0xAE4, "X"),
+ (0xAE6, "V"),
+ (0xAF2, "X"),
+ (0xAF9, "V"),
+ (0xB00, "X"),
+ (0xB01, "V"),
+ (0xB04, "X"),
+ (0xB05, "V"),
+ (0xB0D, "X"),
+ (0xB0F, "V"),
+ (0xB11, "X"),
+ (0xB13, "V"),
+ (0xB29, "X"),
+ (0xB2A, "V"),
+ (0xB31, "X"),
+ (0xB32, "V"),
+ (0xB34, "X"),
+ (0xB35, "V"),
+ (0xB3A, "X"),
+ (0xB3C, "V"),
+ (0xB45, "X"),
+ (0xB47, "V"),
+ (0xB49, "X"),
+ (0xB4B, "V"),
+ (0xB4E, "X"),
+ (0xB55, "V"),
+ (0xB58, "X"),
+ (0xB5C, "M", "ଡ଼"),
+ (0xB5D, "M", "ଢ଼"),
+ (0xB5E, "X"),
+ (0xB5F, "V"),
+ (0xB64, "X"),
+ (0xB66, "V"),
+ (0xB78, "X"),
+ (0xB82, "V"),
+ (0xB84, "X"),
+ (0xB85, "V"),
+ (0xB8B, "X"),
+ (0xB8E, "V"),
+ (0xB91, "X"),
+ (0xB92, "V"),
+ (0xB96, "X"),
+ (0xB99, "V"),
+ (0xB9B, "X"),
+ (0xB9C, "V"),
+ (0xB9D, "X"),
+ (0xB9E, "V"),
+ (0xBA0, "X"),
+ (0xBA3, "V"),
+ (0xBA5, "X"),
+ (0xBA8, "V"),
+ (0xBAB, "X"),
+ (0xBAE, "V"),
+ (0xBBA, "X"),
+ (0xBBE, "V"),
+ (0xBC3, "X"),
+ (0xBC6, "V"),
+ (0xBC9, "X"),
+ (0xBCA, "V"),
+ (0xBCE, "X"),
+ (0xBD0, "V"),
+ (0xBD1, "X"),
+ (0xBD7, "V"),
+ (0xBD8, "X"),
+ (0xBE6, "V"),
+ (0xBFB, "X"),
+ (0xC00, "V"),
+ (0xC0D, "X"),
+ (0xC0E, "V"),
+ (0xC11, "X"),
+ (0xC12, "V"),
+ (0xC29, "X"),
+ (0xC2A, "V"),
+ ]
+
+
+def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xC3A, "X"),
+ (0xC3C, "V"),
+ (0xC45, "X"),
+ (0xC46, "V"),
+ (0xC49, "X"),
+ (0xC4A, "V"),
+ (0xC4E, "X"),
+ (0xC55, "V"),
+ (0xC57, "X"),
+ (0xC58, "V"),
+ (0xC5B, "X"),
+ (0xC5D, "V"),
+ (0xC5E, "X"),
+ (0xC60, "V"),
+ (0xC64, "X"),
+ (0xC66, "V"),
+ (0xC70, "X"),
+ (0xC77, "V"),
+ (0xC8D, "X"),
+ (0xC8E, "V"),
+ (0xC91, "X"),
+ (0xC92, "V"),
+ (0xCA9, "X"),
+ (0xCAA, "V"),
+ (0xCB4, "X"),
+ (0xCB5, "V"),
+ (0xCBA, "X"),
+ (0xCBC, "V"),
+ (0xCC5, "X"),
+ (0xCC6, "V"),
+ (0xCC9, "X"),
+ (0xCCA, "V"),
+ (0xCCE, "X"),
+ (0xCD5, "V"),
+ (0xCD7, "X"),
+ (0xCDD, "V"),
+ (0xCDF, "X"),
+ (0xCE0, "V"),
+ (0xCE4, "X"),
+ (0xCE6, "V"),
+ (0xCF0, "X"),
+ (0xCF1, "V"),
+ (0xCF4, "X"),
+ (0xD00, "V"),
+ (0xD0D, "X"),
+ (0xD0E, "V"),
+ (0xD11, "X"),
+ (0xD12, "V"),
+ (0xD45, "X"),
+ (0xD46, "V"),
+ (0xD49, "X"),
+ (0xD4A, "V"),
+ (0xD50, "X"),
+ (0xD54, "V"),
+ (0xD64, "X"),
+ (0xD66, "V"),
+ (0xD80, "X"),
+ (0xD81, "V"),
+ (0xD84, "X"),
+ (0xD85, "V"),
+ (0xD97, "X"),
+ (0xD9A, "V"),
+ (0xDB2, "X"),
+ (0xDB3, "V"),
+ (0xDBC, "X"),
+ (0xDBD, "V"),
+ (0xDBE, "X"),
+ (0xDC0, "V"),
+ (0xDC7, "X"),
+ (0xDCA, "V"),
+ (0xDCB, "X"),
+ (0xDCF, "V"),
+ (0xDD5, "X"),
+ (0xDD6, "V"),
+ (0xDD7, "X"),
+ (0xDD8, "V"),
+ (0xDE0, "X"),
+ (0xDE6, "V"),
+ (0xDF0, "X"),
+ (0xDF2, "V"),
+ (0xDF5, "X"),
+ (0xE01, "V"),
+ (0xE33, "M", "ํา"),
+ (0xE34, "V"),
+ (0xE3B, "X"),
+ (0xE3F, "V"),
+ (0xE5C, "X"),
+ (0xE81, "V"),
+ (0xE83, "X"),
+ (0xE84, "V"),
+ (0xE85, "X"),
+ (0xE86, "V"),
+ (0xE8B, "X"),
+ (0xE8C, "V"),
+ (0xEA4, "X"),
+ (0xEA5, "V"),
+ (0xEA6, "X"),
+ (0xEA7, "V"),
+ (0xEB3, "M", "ໍາ"),
+ (0xEB4, "V"),
+ ]
+
+
+def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xEBE, "X"),
+ (0xEC0, "V"),
+ (0xEC5, "X"),
+ (0xEC6, "V"),
+ (0xEC7, "X"),
+ (0xEC8, "V"),
+ (0xECF, "X"),
+ (0xED0, "V"),
+ (0xEDA, "X"),
+ (0xEDC, "M", "ຫນ"),
+ (0xEDD, "M", "ຫມ"),
+ (0xEDE, "V"),
+ (0xEE0, "X"),
+ (0xF00, "V"),
+ (0xF0C, "M", "་"),
+ (0xF0D, "V"),
+ (0xF43, "M", "གྷ"),
+ (0xF44, "V"),
+ (0xF48, "X"),
+ (0xF49, "V"),
+ (0xF4D, "M", "ཌྷ"),
+ (0xF4E, "V"),
+ (0xF52, "M", "དྷ"),
+ (0xF53, "V"),
+ (0xF57, "M", "བྷ"),
+ (0xF58, "V"),
+ (0xF5C, "M", "ཛྷ"),
+ (0xF5D, "V"),
+ (0xF69, "M", "ཀྵ"),
+ (0xF6A, "V"),
+ (0xF6D, "X"),
+ (0xF71, "V"),
+ (0xF73, "M", "ཱི"),
+ (0xF74, "V"),
+ (0xF75, "M", "ཱུ"),
+ (0xF76, "M", "ྲྀ"),
+ (0xF77, "M", "ྲཱྀ"),
+ (0xF78, "M", "ླྀ"),
+ (0xF79, "M", "ླཱྀ"),
+ (0xF7A, "V"),
+ (0xF81, "M", "ཱྀ"),
+ (0xF82, "V"),
+ (0xF93, "M", "ྒྷ"),
+ (0xF94, "V"),
+ (0xF98, "X"),
+ (0xF99, "V"),
+ (0xF9D, "M", "ྜྷ"),
+ (0xF9E, "V"),
+ (0xFA2, "M", "ྡྷ"),
+ (0xFA3, "V"),
+ (0xFA7, "M", "ྦྷ"),
+ (0xFA8, "V"),
+ (0xFAC, "M", "ྫྷ"),
+ (0xFAD, "V"),
+ (0xFB9, "M", "ྐྵ"),
+ (0xFBA, "V"),
+ (0xFBD, "X"),
+ (0xFBE, "V"),
+ (0xFCD, "X"),
+ (0xFCE, "V"),
+ (0xFDB, "X"),
+ (0x1000, "V"),
+ (0x10A0, "M", "ⴀ"),
+ (0x10A1, "M", "ⴁ"),
+ (0x10A2, "M", "ⴂ"),
+ (0x10A3, "M", "ⴃ"),
+ (0x10A4, "M", "ⴄ"),
+ (0x10A5, "M", "ⴅ"),
+ (0x10A6, "M", "ⴆ"),
+ (0x10A7, "M", "ⴇ"),
+ (0x10A8, "M", "ⴈ"),
+ (0x10A9, "M", "ⴉ"),
+ (0x10AA, "M", "ⴊ"),
+ (0x10AB, "M", "ⴋ"),
+ (0x10AC, "M", "ⴌ"),
+ (0x10AD, "M", "ⴍ"),
+ (0x10AE, "M", "ⴎ"),
+ (0x10AF, "M", "ⴏ"),
+ (0x10B0, "M", "ⴐ"),
+ (0x10B1, "M", "ⴑ"),
+ (0x10B2, "M", "ⴒ"),
+ (0x10B3, "M", "ⴓ"),
+ (0x10B4, "M", "ⴔ"),
+ (0x10B5, "M", "ⴕ"),
+ (0x10B6, "M", "ⴖ"),
+ (0x10B7, "M", "ⴗ"),
+ (0x10B8, "M", "ⴘ"),
+ (0x10B9, "M", "ⴙ"),
+ (0x10BA, "M", "ⴚ"),
+ (0x10BB, "M", "ⴛ"),
+ (0x10BC, "M", "ⴜ"),
+ (0x10BD, "M", "ⴝ"),
+ (0x10BE, "M", "ⴞ"),
+ (0x10BF, "M", "ⴟ"),
+ (0x10C0, "M", "ⴠ"),
+ (0x10C1, "M", "ⴡ"),
+ (0x10C2, "M", "ⴢ"),
+ (0x10C3, "M", "ⴣ"),
+ (0x10C4, "M", "ⴤ"),
+ (0x10C5, "M", "ⴥ"),
+ ]
+
+
+def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x10C6, "X"),
+ (0x10C7, "M", "ⴧ"),
+ (0x10C8, "X"),
+ (0x10CD, "M", "ⴭ"),
+ (0x10CE, "X"),
+ (0x10D0, "V"),
+ (0x10FC, "M", "ნ"),
+ (0x10FD, "V"),
+ (0x115F, "I"),
+ (0x1161, "V"),
+ (0x1249, "X"),
+ (0x124A, "V"),
+ (0x124E, "X"),
+ (0x1250, "V"),
+ (0x1257, "X"),
+ (0x1258, "V"),
+ (0x1259, "X"),
+ (0x125A, "V"),
+ (0x125E, "X"),
+ (0x1260, "V"),
+ (0x1289, "X"),
+ (0x128A, "V"),
+ (0x128E, "X"),
+ (0x1290, "V"),
+ (0x12B1, "X"),
+ (0x12B2, "V"),
+ (0x12B6, "X"),
+ (0x12B8, "V"),
+ (0x12BF, "X"),
+ (0x12C0, "V"),
+ (0x12C1, "X"),
+ (0x12C2, "V"),
+ (0x12C6, "X"),
+ (0x12C8, "V"),
+ (0x12D7, "X"),
+ (0x12D8, "V"),
+ (0x1311, "X"),
+ (0x1312, "V"),
+ (0x1316, "X"),
+ (0x1318, "V"),
+ (0x135B, "X"),
+ (0x135D, "V"),
+ (0x137D, "X"),
+ (0x1380, "V"),
+ (0x139A, "X"),
+ (0x13A0, "V"),
+ (0x13F6, "X"),
+ (0x13F8, "M", "Ᏸ"),
+ (0x13F9, "M", "Ᏹ"),
+ (0x13FA, "M", "Ᏺ"),
+ (0x13FB, "M", "Ᏻ"),
+ (0x13FC, "M", "Ᏼ"),
+ (0x13FD, "M", "Ᏽ"),
+ (0x13FE, "X"),
+ (0x1400, "V"),
+ (0x1680, "X"),
+ (0x1681, "V"),
+ (0x169D, "X"),
+ (0x16A0, "V"),
+ (0x16F9, "X"),
+ (0x1700, "V"),
+ (0x1716, "X"),
+ (0x171F, "V"),
+ (0x1737, "X"),
+ (0x1740, "V"),
+ (0x1754, "X"),
+ (0x1760, "V"),
+ (0x176D, "X"),
+ (0x176E, "V"),
+ (0x1771, "X"),
+ (0x1772, "V"),
+ (0x1774, "X"),
+ (0x1780, "V"),
+ (0x17B4, "I"),
+ (0x17B6, "V"),
+ (0x17DE, "X"),
+ (0x17E0, "V"),
+ (0x17EA, "X"),
+ (0x17F0, "V"),
+ (0x17FA, "X"),
+ (0x1800, "V"),
+ (0x180B, "I"),
+ (0x1810, "V"),
+ (0x181A, "X"),
+ (0x1820, "V"),
+ (0x1879, "X"),
+ (0x1880, "V"),
+ (0x18AB, "X"),
+ (0x18B0, "V"),
+ (0x18F6, "X"),
+ (0x1900, "V"),
+ (0x191F, "X"),
+ (0x1920, "V"),
+ (0x192C, "X"),
+ (0x1930, "V"),
+ (0x193C, "X"),
+ (0x1940, "V"),
+ (0x1941, "X"),
+ (0x1944, "V"),
+ (0x196E, "X"),
+ ]
+
+
+def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1970, "V"),
+ (0x1975, "X"),
+ (0x1980, "V"),
+ (0x19AC, "X"),
+ (0x19B0, "V"),
+ (0x19CA, "X"),
+ (0x19D0, "V"),
+ (0x19DB, "X"),
+ (0x19DE, "V"),
+ (0x1A1C, "X"),
+ (0x1A1E, "V"),
+ (0x1A5F, "X"),
+ (0x1A60, "V"),
+ (0x1A7D, "X"),
+ (0x1A7F, "V"),
+ (0x1A8A, "X"),
+ (0x1A90, "V"),
+ (0x1A9A, "X"),
+ (0x1AA0, "V"),
+ (0x1AAE, "X"),
+ (0x1AB0, "V"),
+ (0x1ACF, "X"),
+ (0x1B00, "V"),
+ (0x1B4D, "X"),
+ (0x1B4E, "V"),
+ (0x1BF4, "X"),
+ (0x1BFC, "V"),
+ (0x1C38, "X"),
+ (0x1C3B, "V"),
+ (0x1C4A, "X"),
+ (0x1C4D, "V"),
+ (0x1C80, "M", "в"),
+ (0x1C81, "M", "д"),
+ (0x1C82, "M", "о"),
+ (0x1C83, "M", "с"),
+ (0x1C84, "M", "т"),
+ (0x1C86, "M", "ъ"),
+ (0x1C87, "M", "ѣ"),
+ (0x1C88, "M", "ꙋ"),
+ (0x1C89, "M", ""),
+ (0x1C8A, "V"),
+ (0x1C8B, "X"),
+ (0x1C90, "M", "ა"),
+ (0x1C91, "M", "ბ"),
+ (0x1C92, "M", "გ"),
+ (0x1C93, "M", "დ"),
+ (0x1C94, "M", "ე"),
+ (0x1C95, "M", "ვ"),
+ (0x1C96, "M", "ზ"),
+ (0x1C97, "M", "თ"),
+ (0x1C98, "M", "ი"),
+ (0x1C99, "M", "კ"),
+ (0x1C9A, "M", "ლ"),
+ (0x1C9B, "M", "მ"),
+ (0x1C9C, "M", "ნ"),
+ (0x1C9D, "M", "ო"),
+ (0x1C9E, "M", "პ"),
+ (0x1C9F, "M", "ჟ"),
+ (0x1CA0, "M", "რ"),
+ (0x1CA1, "M", "ს"),
+ (0x1CA2, "M", "ტ"),
+ (0x1CA3, "M", "უ"),
+ (0x1CA4, "M", "ფ"),
+ (0x1CA5, "M", "ქ"),
+ (0x1CA6, "M", "ღ"),
+ (0x1CA7, "M", "ყ"),
+ (0x1CA8, "M", "შ"),
+ (0x1CA9, "M", "ჩ"),
+ (0x1CAA, "M", "ც"),
+ (0x1CAB, "M", "ძ"),
+ (0x1CAC, "M", "წ"),
+ (0x1CAD, "M", "ჭ"),
+ (0x1CAE, "M", "ხ"),
+ (0x1CAF, "M", "ჯ"),
+ (0x1CB0, "M", "ჰ"),
+ (0x1CB1, "M", "ჱ"),
+ (0x1CB2, "M", "ჲ"),
+ (0x1CB3, "M", "ჳ"),
+ (0x1CB4, "M", "ჴ"),
+ (0x1CB5, "M", "ჵ"),
+ (0x1CB6, "M", "ჶ"),
+ (0x1CB7, "M", "ჷ"),
+ (0x1CB8, "M", "ჸ"),
+ (0x1CB9, "M", "ჹ"),
+ (0x1CBA, "M", "ჺ"),
+ (0x1CBB, "X"),
+ (0x1CBD, "M", "ჽ"),
+ (0x1CBE, "M", "ჾ"),
+ (0x1CBF, "M", "ჿ"),
+ (0x1CC0, "V"),
+ (0x1CC8, "X"),
+ (0x1CD0, "V"),
+ (0x1CFB, "X"),
+ (0x1D00, "V"),
+ (0x1D2C, "M", "a"),
+ (0x1D2D, "M", "æ"),
+ (0x1D2E, "M", "b"),
+ (0x1D2F, "V"),
+ (0x1D30, "M", "d"),
+ (0x1D31, "M", "e"),
+ ]
+
+
+def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D32, "M", "ǝ"),
+ (0x1D33, "M", "g"),
+ (0x1D34, "M", "h"),
+ (0x1D35, "M", "i"),
+ (0x1D36, "M", "j"),
+ (0x1D37, "M", "k"),
+ (0x1D38, "M", "l"),
+ (0x1D39, "M", "m"),
+ (0x1D3A, "M", "n"),
+ (0x1D3B, "V"),
+ (0x1D3C, "M", "o"),
+ (0x1D3D, "M", "ȣ"),
+ (0x1D3E, "M", "p"),
+ (0x1D3F, "M", "r"),
+ (0x1D40, "M", "t"),
+ (0x1D41, "M", "u"),
+ (0x1D42, "M", "w"),
+ (0x1D43, "M", "a"),
+ (0x1D44, "M", "ɐ"),
+ (0x1D45, "M", "ɑ"),
+ (0x1D46, "M", "ᴂ"),
+ (0x1D47, "M", "b"),
+ (0x1D48, "M", "d"),
+ (0x1D49, "M", "e"),
+ (0x1D4A, "M", "ə"),
+ (0x1D4B, "M", "ɛ"),
+ (0x1D4C, "M", "ɜ"),
+ (0x1D4D, "M", "g"),
+ (0x1D4E, "V"),
+ (0x1D4F, "M", "k"),
+ (0x1D50, "M", "m"),
+ (0x1D51, "M", "ŋ"),
+ (0x1D52, "M", "o"),
+ (0x1D53, "M", "ɔ"),
+ (0x1D54, "M", "ᴖ"),
+ (0x1D55, "M", "ᴗ"),
+ (0x1D56, "M", "p"),
+ (0x1D57, "M", "t"),
+ (0x1D58, "M", "u"),
+ (0x1D59, "M", "ᴝ"),
+ (0x1D5A, "M", "ɯ"),
+ (0x1D5B, "M", "v"),
+ (0x1D5C, "M", "ᴥ"),
+ (0x1D5D, "M", "β"),
+ (0x1D5E, "M", "γ"),
+ (0x1D5F, "M", "δ"),
+ (0x1D60, "M", "φ"),
+ (0x1D61, "M", "χ"),
+ (0x1D62, "M", "i"),
+ (0x1D63, "M", "r"),
+ (0x1D64, "M", "u"),
+ (0x1D65, "M", "v"),
+ (0x1D66, "M", "β"),
+ (0x1D67, "M", "γ"),
+ (0x1D68, "M", "ρ"),
+ (0x1D69, "M", "φ"),
+ (0x1D6A, "M", "χ"),
+ (0x1D6B, "V"),
+ (0x1D78, "M", "н"),
+ (0x1D79, "V"),
+ (0x1D9B, "M", "ɒ"),
+ (0x1D9C, "M", "c"),
+ (0x1D9D, "M", "ɕ"),
+ (0x1D9E, "M", "ð"),
+ (0x1D9F, "M", "ɜ"),
+ (0x1DA0, "M", "f"),
+ (0x1DA1, "M", "ɟ"),
+ (0x1DA2, "M", "ɡ"),
+ (0x1DA3, "M", "ɥ"),
+ (0x1DA4, "M", "ɨ"),
+ (0x1DA5, "M", "ɩ"),
+ (0x1DA6, "M", "ɪ"),
+ (0x1DA7, "M", "ᵻ"),
+ (0x1DA8, "M", "ʝ"),
+ (0x1DA9, "M", "ɭ"),
+ (0x1DAA, "M", "ᶅ"),
+ (0x1DAB, "M", "ʟ"),
+ (0x1DAC, "M", "ɱ"),
+ (0x1DAD, "M", "ɰ"),
+ (0x1DAE, "M", "ɲ"),
+ (0x1DAF, "M", "ɳ"),
+ (0x1DB0, "M", "ɴ"),
+ (0x1DB1, "M", "ɵ"),
+ (0x1DB2, "M", "ɸ"),
+ (0x1DB3, "M", "ʂ"),
+ (0x1DB4, "M", "ʃ"),
+ (0x1DB5, "M", "ƫ"),
+ (0x1DB6, "M", "ʉ"),
+ (0x1DB7, "M", "ʊ"),
+ (0x1DB8, "M", "ᴜ"),
+ (0x1DB9, "M", "ʋ"),
+ (0x1DBA, "M", "ʌ"),
+ (0x1DBB, "M", "z"),
+ (0x1DBC, "M", "ʐ"),
+ (0x1DBD, "M", "ʑ"),
+ (0x1DBE, "M", "ʒ"),
+ (0x1DBF, "M", "θ"),
+ (0x1DC0, "V"),
+ (0x1E00, "M", "ḁ"),
+ (0x1E01, "V"),
+ ]
+
+
+def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E02, "M", "ḃ"),
+ (0x1E03, "V"),
+ (0x1E04, "M", "ḅ"),
+ (0x1E05, "V"),
+ (0x1E06, "M", "ḇ"),
+ (0x1E07, "V"),
+ (0x1E08, "M", "ḉ"),
+ (0x1E09, "V"),
+ (0x1E0A, "M", "ḋ"),
+ (0x1E0B, "V"),
+ (0x1E0C, "M", "ḍ"),
+ (0x1E0D, "V"),
+ (0x1E0E, "M", "ḏ"),
+ (0x1E0F, "V"),
+ (0x1E10, "M", "ḑ"),
+ (0x1E11, "V"),
+ (0x1E12, "M", "ḓ"),
+ (0x1E13, "V"),
+ (0x1E14, "M", "ḕ"),
+ (0x1E15, "V"),
+ (0x1E16, "M", "ḗ"),
+ (0x1E17, "V"),
+ (0x1E18, "M", "ḙ"),
+ (0x1E19, "V"),
+ (0x1E1A, "M", "ḛ"),
+ (0x1E1B, "V"),
+ (0x1E1C, "M", "ḝ"),
+ (0x1E1D, "V"),
+ (0x1E1E, "M", "ḟ"),
+ (0x1E1F, "V"),
+ (0x1E20, "M", "ḡ"),
+ (0x1E21, "V"),
+ (0x1E22, "M", "ḣ"),
+ (0x1E23, "V"),
+ (0x1E24, "M", "ḥ"),
+ (0x1E25, "V"),
+ (0x1E26, "M", "ḧ"),
+ (0x1E27, "V"),
+ (0x1E28, "M", "ḩ"),
+ (0x1E29, "V"),
+ (0x1E2A, "M", "ḫ"),
+ (0x1E2B, "V"),
+ (0x1E2C, "M", "ḭ"),
+ (0x1E2D, "V"),
+ (0x1E2E, "M", "ḯ"),
+ (0x1E2F, "V"),
+ (0x1E30, "M", "ḱ"),
+ (0x1E31, "V"),
+ (0x1E32, "M", "ḳ"),
+ (0x1E33, "V"),
+ (0x1E34, "M", "ḵ"),
+ (0x1E35, "V"),
+ (0x1E36, "M", "ḷ"),
+ (0x1E37, "V"),
+ (0x1E38, "M", "ḹ"),
+ (0x1E39, "V"),
+ (0x1E3A, "M", "ḻ"),
+ (0x1E3B, "V"),
+ (0x1E3C, "M", "ḽ"),
+ (0x1E3D, "V"),
+ (0x1E3E, "M", "ḿ"),
+ (0x1E3F, "V"),
+ (0x1E40, "M", "ṁ"),
+ (0x1E41, "V"),
+ (0x1E42, "M", "ṃ"),
+ (0x1E43, "V"),
+ (0x1E44, "M", "ṅ"),
+ (0x1E45, "V"),
+ (0x1E46, "M", "ṇ"),
+ (0x1E47, "V"),
+ (0x1E48, "M", "ṉ"),
+ (0x1E49, "V"),
+ (0x1E4A, "M", "ṋ"),
+ (0x1E4B, "V"),
+ (0x1E4C, "M", "ṍ"),
+ (0x1E4D, "V"),
+ (0x1E4E, "M", "ṏ"),
+ (0x1E4F, "V"),
+ (0x1E50, "M", "ṑ"),
+ (0x1E51, "V"),
+ (0x1E52, "M", "ṓ"),
+ (0x1E53, "V"),
+ (0x1E54, "M", "ṕ"),
+ (0x1E55, "V"),
+ (0x1E56, "M", "ṗ"),
+ (0x1E57, "V"),
+ (0x1E58, "M", "ṙ"),
+ (0x1E59, "V"),
+ (0x1E5A, "M", "ṛ"),
+ (0x1E5B, "V"),
+ (0x1E5C, "M", "ṝ"),
+ (0x1E5D, "V"),
+ (0x1E5E, "M", "ṟ"),
+ (0x1E5F, "V"),
+ (0x1E60, "M", "ṡ"),
+ (0x1E61, "V"),
+ (0x1E62, "M", "ṣ"),
+ (0x1E63, "V"),
+ (0x1E64, "M", "ṥ"),
+ (0x1E65, "V"),
+ ]
+
+
+def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E66, "M", "ṧ"),
+ (0x1E67, "V"),
+ (0x1E68, "M", "ṩ"),
+ (0x1E69, "V"),
+ (0x1E6A, "M", "ṫ"),
+ (0x1E6B, "V"),
+ (0x1E6C, "M", "ṭ"),
+ (0x1E6D, "V"),
+ (0x1E6E, "M", "ṯ"),
+ (0x1E6F, "V"),
+ (0x1E70, "M", "ṱ"),
+ (0x1E71, "V"),
+ (0x1E72, "M", "ṳ"),
+ (0x1E73, "V"),
+ (0x1E74, "M", "ṵ"),
+ (0x1E75, "V"),
+ (0x1E76, "M", "ṷ"),
+ (0x1E77, "V"),
+ (0x1E78, "M", "ṹ"),
+ (0x1E79, "V"),
+ (0x1E7A, "M", "ṻ"),
+ (0x1E7B, "V"),
+ (0x1E7C, "M", "ṽ"),
+ (0x1E7D, "V"),
+ (0x1E7E, "M", "ṿ"),
+ (0x1E7F, "V"),
+ (0x1E80, "M", "ẁ"),
+ (0x1E81, "V"),
+ (0x1E82, "M", "ẃ"),
+ (0x1E83, "V"),
+ (0x1E84, "M", "ẅ"),
+ (0x1E85, "V"),
+ (0x1E86, "M", "ẇ"),
+ (0x1E87, "V"),
+ (0x1E88, "M", "ẉ"),
+ (0x1E89, "V"),
+ (0x1E8A, "M", "ẋ"),
+ (0x1E8B, "V"),
+ (0x1E8C, "M", "ẍ"),
+ (0x1E8D, "V"),
+ (0x1E8E, "M", "ẏ"),
+ (0x1E8F, "V"),
+ (0x1E90, "M", "ẑ"),
+ (0x1E91, "V"),
+ (0x1E92, "M", "ẓ"),
+ (0x1E93, "V"),
+ (0x1E94, "M", "ẕ"),
+ (0x1E95, "V"),
+ (0x1E9A, "M", "aʾ"),
+ (0x1E9B, "M", "ṡ"),
+ (0x1E9C, "V"),
+ (0x1E9E, "M", "ß"),
+ (0x1E9F, "V"),
+ (0x1EA0, "M", "ạ"),
+ (0x1EA1, "V"),
+ (0x1EA2, "M", "ả"),
+ (0x1EA3, "V"),
+ (0x1EA4, "M", "ấ"),
+ (0x1EA5, "V"),
+ (0x1EA6, "M", "ầ"),
+ (0x1EA7, "V"),
+ (0x1EA8, "M", "ẩ"),
+ (0x1EA9, "V"),
+ (0x1EAA, "M", "ẫ"),
+ (0x1EAB, "V"),
+ (0x1EAC, "M", "ậ"),
+ (0x1EAD, "V"),
+ (0x1EAE, "M", "ắ"),
+ (0x1EAF, "V"),
+ (0x1EB0, "M", "ằ"),
+ (0x1EB1, "V"),
+ (0x1EB2, "M", "ẳ"),
+ (0x1EB3, "V"),
+ (0x1EB4, "M", "ẵ"),
+ (0x1EB5, "V"),
+ (0x1EB6, "M", "ặ"),
+ (0x1EB7, "V"),
+ (0x1EB8, "M", "ẹ"),
+ (0x1EB9, "V"),
+ (0x1EBA, "M", "ẻ"),
+ (0x1EBB, "V"),
+ (0x1EBC, "M", "ẽ"),
+ (0x1EBD, "V"),
+ (0x1EBE, "M", "ế"),
+ (0x1EBF, "V"),
+ (0x1EC0, "M", "ề"),
+ (0x1EC1, "V"),
+ (0x1EC2, "M", "ể"),
+ (0x1EC3, "V"),
+ (0x1EC4, "M", "ễ"),
+ (0x1EC5, "V"),
+ (0x1EC6, "M", "ệ"),
+ (0x1EC7, "V"),
+ (0x1EC8, "M", "ỉ"),
+ (0x1EC9, "V"),
+ (0x1ECA, "M", "ị"),
+ (0x1ECB, "V"),
+ (0x1ECC, "M", "ọ"),
+ (0x1ECD, "V"),
+ (0x1ECE, "M", "ỏ"),
+ ]
+
+
+def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1ECF, "V"),
+ (0x1ED0, "M", "ố"),
+ (0x1ED1, "V"),
+ (0x1ED2, "M", "ồ"),
+ (0x1ED3, "V"),
+ (0x1ED4, "M", "ổ"),
+ (0x1ED5, "V"),
+ (0x1ED6, "M", "ỗ"),
+ (0x1ED7, "V"),
+ (0x1ED8, "M", "ộ"),
+ (0x1ED9, "V"),
+ (0x1EDA, "M", "ớ"),
+ (0x1EDB, "V"),
+ (0x1EDC, "M", "ờ"),
+ (0x1EDD, "V"),
+ (0x1EDE, "M", "ở"),
+ (0x1EDF, "V"),
+ (0x1EE0, "M", "ỡ"),
+ (0x1EE1, "V"),
+ (0x1EE2, "M", "ợ"),
+ (0x1EE3, "V"),
+ (0x1EE4, "M", "ụ"),
+ (0x1EE5, "V"),
+ (0x1EE6, "M", "ủ"),
+ (0x1EE7, "V"),
+ (0x1EE8, "M", "ứ"),
+ (0x1EE9, "V"),
+ (0x1EEA, "M", "ừ"),
+ (0x1EEB, "V"),
+ (0x1EEC, "M", "ử"),
+ (0x1EED, "V"),
+ (0x1EEE, "M", "ữ"),
+ (0x1EEF, "V"),
+ (0x1EF0, "M", "ự"),
+ (0x1EF1, "V"),
+ (0x1EF2, "M", "ỳ"),
+ (0x1EF3, "V"),
+ (0x1EF4, "M", "ỵ"),
+ (0x1EF5, "V"),
+ (0x1EF6, "M", "ỷ"),
+ (0x1EF7, "V"),
+ (0x1EF8, "M", "ỹ"),
+ (0x1EF9, "V"),
+ (0x1EFA, "M", "ỻ"),
+ (0x1EFB, "V"),
+ (0x1EFC, "M", "ỽ"),
+ (0x1EFD, "V"),
+ (0x1EFE, "M", "ỿ"),
+ (0x1EFF, "V"),
+ (0x1F08, "M", "ἀ"),
+ (0x1F09, "M", "ἁ"),
+ (0x1F0A, "M", "ἂ"),
+ (0x1F0B, "M", "ἃ"),
+ (0x1F0C, "M", "ἄ"),
+ (0x1F0D, "M", "ἅ"),
+ (0x1F0E, "M", "ἆ"),
+ (0x1F0F, "M", "ἇ"),
+ (0x1F10, "V"),
+ (0x1F16, "X"),
+ (0x1F18, "M", "ἐ"),
+ (0x1F19, "M", "ἑ"),
+ (0x1F1A, "M", "ἒ"),
+ (0x1F1B, "M", "ἓ"),
+ (0x1F1C, "M", "ἔ"),
+ (0x1F1D, "M", "ἕ"),
+ (0x1F1E, "X"),
+ (0x1F20, "V"),
+ (0x1F28, "M", "ἠ"),
+ (0x1F29, "M", "ἡ"),
+ (0x1F2A, "M", "ἢ"),
+ (0x1F2B, "M", "ἣ"),
+ (0x1F2C, "M", "ἤ"),
+ (0x1F2D, "M", "ἥ"),
+ (0x1F2E, "M", "ἦ"),
+ (0x1F2F, "M", "ἧ"),
+ (0x1F30, "V"),
+ (0x1F38, "M", "ἰ"),
+ (0x1F39, "M", "ἱ"),
+ (0x1F3A, "M", "ἲ"),
+ (0x1F3B, "M", "ἳ"),
+ (0x1F3C, "M", "ἴ"),
+ (0x1F3D, "M", "ἵ"),
+ (0x1F3E, "M", "ἶ"),
+ (0x1F3F, "M", "ἷ"),
+ (0x1F40, "V"),
+ (0x1F46, "X"),
+ (0x1F48, "M", "ὀ"),
+ (0x1F49, "M", "ὁ"),
+ (0x1F4A, "M", "ὂ"),
+ (0x1F4B, "M", "ὃ"),
+ (0x1F4C, "M", "ὄ"),
+ (0x1F4D, "M", "ὅ"),
+ (0x1F4E, "X"),
+ (0x1F50, "V"),
+ (0x1F58, "X"),
+ (0x1F59, "M", "ὑ"),
+ (0x1F5A, "X"),
+ (0x1F5B, "M", "ὓ"),
+ (0x1F5C, "X"),
+ (0x1F5D, "M", "ὕ"),
+ ]
+
+
+def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1F5E, "X"),
+ (0x1F5F, "M", "ὗ"),
+ (0x1F60, "V"),
+ (0x1F68, "M", "ὠ"),
+ (0x1F69, "M", "ὡ"),
+ (0x1F6A, "M", "ὢ"),
+ (0x1F6B, "M", "ὣ"),
+ (0x1F6C, "M", "ὤ"),
+ (0x1F6D, "M", "ὥ"),
+ (0x1F6E, "M", "ὦ"),
+ (0x1F6F, "M", "ὧ"),
+ (0x1F70, "V"),
+ (0x1F71, "M", "ά"),
+ (0x1F72, "V"),
+ (0x1F73, "M", "έ"),
+ (0x1F74, "V"),
+ (0x1F75, "M", "ή"),
+ (0x1F76, "V"),
+ (0x1F77, "M", "ί"),
+ (0x1F78, "V"),
+ (0x1F79, "M", "ό"),
+ (0x1F7A, "V"),
+ (0x1F7B, "M", "ύ"),
+ (0x1F7C, "V"),
+ (0x1F7D, "M", "ώ"),
+ (0x1F7E, "X"),
+ (0x1F80, "M", "ἀι"),
+ (0x1F81, "M", "ἁι"),
+ (0x1F82, "M", "ἂι"),
+ (0x1F83, "M", "ἃι"),
+ (0x1F84, "M", "ἄι"),
+ (0x1F85, "M", "ἅι"),
+ (0x1F86, "M", "ἆι"),
+ (0x1F87, "M", "ἇι"),
+ (0x1F88, "M", "ἀι"),
+ (0x1F89, "M", "ἁι"),
+ (0x1F8A, "M", "ἂι"),
+ (0x1F8B, "M", "ἃι"),
+ (0x1F8C, "M", "ἄι"),
+ (0x1F8D, "M", "ἅι"),
+ (0x1F8E, "M", "ἆι"),
+ (0x1F8F, "M", "ἇι"),
+ (0x1F90, "M", "ἠι"),
+ (0x1F91, "M", "ἡι"),
+ (0x1F92, "M", "ἢι"),
+ (0x1F93, "M", "ἣι"),
+ (0x1F94, "M", "ἤι"),
+ (0x1F95, "M", "ἥι"),
+ (0x1F96, "M", "ἦι"),
+ (0x1F97, "M", "ἧι"),
+ (0x1F98, "M", "ἠι"),
+ (0x1F99, "M", "ἡι"),
+ (0x1F9A, "M", "ἢι"),
+ (0x1F9B, "M", "ἣι"),
+ (0x1F9C, "M", "ἤι"),
+ (0x1F9D, "M", "ἥι"),
+ (0x1F9E, "M", "ἦι"),
+ (0x1F9F, "M", "ἧι"),
+ (0x1FA0, "M", "ὠι"),
+ (0x1FA1, "M", "ὡι"),
+ (0x1FA2, "M", "ὢι"),
+ (0x1FA3, "M", "ὣι"),
+ (0x1FA4, "M", "ὤι"),
+ (0x1FA5, "M", "ὥι"),
+ (0x1FA6, "M", "ὦι"),
+ (0x1FA7, "M", "ὧι"),
+ (0x1FA8, "M", "ὠι"),
+ (0x1FA9, "M", "ὡι"),
+ (0x1FAA, "M", "ὢι"),
+ (0x1FAB, "M", "ὣι"),
+ (0x1FAC, "M", "ὤι"),
+ (0x1FAD, "M", "ὥι"),
+ (0x1FAE, "M", "ὦι"),
+ (0x1FAF, "M", "ὧι"),
+ (0x1FB0, "V"),
+ (0x1FB2, "M", "ὰι"),
+ (0x1FB3, "M", "αι"),
+ (0x1FB4, "M", "άι"),
+ (0x1FB5, "X"),
+ (0x1FB6, "V"),
+ (0x1FB7, "M", "ᾶι"),
+ (0x1FB8, "M", "ᾰ"),
+ (0x1FB9, "M", "ᾱ"),
+ (0x1FBA, "M", "ὰ"),
+ (0x1FBB, "M", "ά"),
+ (0x1FBC, "M", "αι"),
+ (0x1FBD, "M", " ̓"),
+ (0x1FBE, "M", "ι"),
+ (0x1FBF, "M", " ̓"),
+ (0x1FC0, "M", " ͂"),
+ (0x1FC1, "M", " ̈͂"),
+ (0x1FC2, "M", "ὴι"),
+ (0x1FC3, "M", "ηι"),
+ (0x1FC4, "M", "ήι"),
+ (0x1FC5, "X"),
+ (0x1FC6, "V"),
+ (0x1FC7, "M", "ῆι"),
+ (0x1FC8, "M", "ὲ"),
+ (0x1FC9, "M", "έ"),
+ (0x1FCA, "M", "ὴ"),
+ ]
+
+
+def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1FCB, "M", "ή"),
+ (0x1FCC, "M", "ηι"),
+ (0x1FCD, "M", " ̓̀"),
+ (0x1FCE, "M", " ̓́"),
+ (0x1FCF, "M", " ̓͂"),
+ (0x1FD0, "V"),
+ (0x1FD3, "M", "ΐ"),
+ (0x1FD4, "X"),
+ (0x1FD6, "V"),
+ (0x1FD8, "M", "ῐ"),
+ (0x1FD9, "M", "ῑ"),
+ (0x1FDA, "M", "ὶ"),
+ (0x1FDB, "M", "ί"),
+ (0x1FDC, "X"),
+ (0x1FDD, "M", " ̔̀"),
+ (0x1FDE, "M", " ̔́"),
+ (0x1FDF, "M", " ̔͂"),
+ (0x1FE0, "V"),
+ (0x1FE3, "M", "ΰ"),
+ (0x1FE4, "V"),
+ (0x1FE8, "M", "ῠ"),
+ (0x1FE9, "M", "ῡ"),
+ (0x1FEA, "M", "ὺ"),
+ (0x1FEB, "M", "ύ"),
+ (0x1FEC, "M", "ῥ"),
+ (0x1FED, "M", " ̈̀"),
+ (0x1FEE, "M", " ̈́"),
+ (0x1FEF, "M", "`"),
+ (0x1FF0, "X"),
+ (0x1FF2, "M", "ὼι"),
+ (0x1FF3, "M", "ωι"),
+ (0x1FF4, "M", "ώι"),
+ (0x1FF5, "X"),
+ (0x1FF6, "V"),
+ (0x1FF7, "M", "ῶι"),
+ (0x1FF8, "M", "ὸ"),
+ (0x1FF9, "M", "ό"),
+ (0x1FFA, "M", "ὼ"),
+ (0x1FFB, "M", "ώ"),
+ (0x1FFC, "M", "ωι"),
+ (0x1FFD, "M", " ́"),
+ (0x1FFE, "M", " ̔"),
+ (0x1FFF, "X"),
+ (0x2000, "M", " "),
+ (0x200B, "I"),
+ (0x200C, "D", ""),
+ (0x200E, "X"),
+ (0x2010, "V"),
+ (0x2011, "M", "‐"),
+ (0x2012, "V"),
+ (0x2017, "M", " ̳"),
+ (0x2018, "V"),
+ (0x2024, "X"),
+ (0x2027, "V"),
+ (0x2028, "X"),
+ (0x202F, "M", " "),
+ (0x2030, "V"),
+ (0x2033, "M", "′′"),
+ (0x2034, "M", "′′′"),
+ (0x2035, "V"),
+ (0x2036, "M", "‵‵"),
+ (0x2037, "M", "‵‵‵"),
+ (0x2038, "V"),
+ (0x203C, "M", "!!"),
+ (0x203D, "V"),
+ (0x203E, "M", " ̅"),
+ (0x203F, "V"),
+ (0x2047, "M", "??"),
+ (0x2048, "M", "?!"),
+ (0x2049, "M", "!?"),
+ (0x204A, "V"),
+ (0x2057, "M", "′′′′"),
+ (0x2058, "V"),
+ (0x205F, "M", " "),
+ (0x2060, "I"),
+ (0x2065, "X"),
+ (0x206A, "I"),
+ (0x2070, "M", "0"),
+ (0x2071, "M", "i"),
+ (0x2072, "X"),
+ (0x2074, "M", "4"),
+ (0x2075, "M", "5"),
+ (0x2076, "M", "6"),
+ (0x2077, "M", "7"),
+ (0x2078, "M", "8"),
+ (0x2079, "M", "9"),
+ (0x207A, "M", "+"),
+ (0x207B, "M", "−"),
+ (0x207C, "M", "="),
+ (0x207D, "M", "("),
+ (0x207E, "M", ")"),
+ (0x207F, "M", "n"),
+ (0x2080, "M", "0"),
+ (0x2081, "M", "1"),
+ (0x2082, "M", "2"),
+ (0x2083, "M", "3"),
+ (0x2084, "M", "4"),
+ (0x2085, "M", "5"),
+ (0x2086, "M", "6"),
+ (0x2087, "M", "7"),
+ ]
+
+
+def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2088, "M", "8"),
+ (0x2089, "M", "9"),
+ (0x208A, "M", "+"),
+ (0x208B, "M", "−"),
+ (0x208C, "M", "="),
+ (0x208D, "M", "("),
+ (0x208E, "M", ")"),
+ (0x208F, "X"),
+ (0x2090, "M", "a"),
+ (0x2091, "M", "e"),
+ (0x2092, "M", "o"),
+ (0x2093, "M", "x"),
+ (0x2094, "M", "ə"),
+ (0x2095, "M", "h"),
+ (0x2096, "M", "k"),
+ (0x2097, "M", "l"),
+ (0x2098, "M", "m"),
+ (0x2099, "M", "n"),
+ (0x209A, "M", "p"),
+ (0x209B, "M", "s"),
+ (0x209C, "M", "t"),
+ (0x209D, "X"),
+ (0x20A0, "V"),
+ (0x20A8, "M", "rs"),
+ (0x20A9, "V"),
+ (0x20C1, "X"),
+ (0x20D0, "V"),
+ (0x20F1, "X"),
+ (0x2100, "M", "a/c"),
+ (0x2101, "M", "a/s"),
+ (0x2102, "M", "c"),
+ (0x2103, "M", "°c"),
+ (0x2104, "V"),
+ (0x2105, "M", "c/o"),
+ (0x2106, "M", "c/u"),
+ (0x2107, "M", "ɛ"),
+ (0x2108, "V"),
+ (0x2109, "M", "°f"),
+ (0x210A, "M", "g"),
+ (0x210B, "M", "h"),
+ (0x210F, "M", "ħ"),
+ (0x2110, "M", "i"),
+ (0x2112, "M", "l"),
+ (0x2114, "V"),
+ (0x2115, "M", "n"),
+ (0x2116, "M", "no"),
+ (0x2117, "V"),
+ (0x2119, "M", "p"),
+ (0x211A, "M", "q"),
+ (0x211B, "M", "r"),
+ (0x211E, "V"),
+ (0x2120, "M", "sm"),
+ (0x2121, "M", "tel"),
+ (0x2122, "M", "tm"),
+ (0x2123, "V"),
+ (0x2124, "M", "z"),
+ (0x2125, "V"),
+ (0x2126, "M", "ω"),
+ (0x2127, "V"),
+ (0x2128, "M", "z"),
+ (0x2129, "V"),
+ (0x212A, "M", "k"),
+ (0x212B, "M", "å"),
+ (0x212C, "M", "b"),
+ (0x212D, "M", "c"),
+ (0x212E, "V"),
+ (0x212F, "M", "e"),
+ (0x2131, "M", "f"),
+ (0x2132, "M", "ⅎ"),
+ (0x2133, "M", "m"),
+ (0x2134, "M", "o"),
+ (0x2135, "M", "א"),
+ (0x2136, "M", "ב"),
+ (0x2137, "M", "ג"),
+ (0x2138, "M", "ד"),
+ (0x2139, "M", "i"),
+ (0x213A, "V"),
+ (0x213B, "M", "fax"),
+ (0x213C, "M", "π"),
+ (0x213D, "M", "γ"),
+ (0x213F, "M", "π"),
+ (0x2140, "M", "∑"),
+ (0x2141, "V"),
+ (0x2145, "M", "d"),
+ (0x2147, "M", "e"),
+ (0x2148, "M", "i"),
+ (0x2149, "M", "j"),
+ (0x214A, "V"),
+ (0x2150, "M", "1⁄7"),
+ (0x2151, "M", "1⁄9"),
+ (0x2152, "M", "1⁄10"),
+ (0x2153, "M", "1⁄3"),
+ (0x2154, "M", "2⁄3"),
+ (0x2155, "M", "1⁄5"),
+ (0x2156, "M", "2⁄5"),
+ (0x2157, "M", "3⁄5"),
+ (0x2158, "M", "4⁄5"),
+ (0x2159, "M", "1⁄6"),
+ (0x215A, "M", "5⁄6"),
+ (0x215B, "M", "1⁄8"),
+ ]
+
+
+def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x215C, "M", "3⁄8"),
+ (0x215D, "M", "5⁄8"),
+ (0x215E, "M", "7⁄8"),
+ (0x215F, "M", "1⁄"),
+ (0x2160, "M", "i"),
+ (0x2161, "M", "ii"),
+ (0x2162, "M", "iii"),
+ (0x2163, "M", "iv"),
+ (0x2164, "M", "v"),
+ (0x2165, "M", "vi"),
+ (0x2166, "M", "vii"),
+ (0x2167, "M", "viii"),
+ (0x2168, "M", "ix"),
+ (0x2169, "M", "x"),
+ (0x216A, "M", "xi"),
+ (0x216B, "M", "xii"),
+ (0x216C, "M", "l"),
+ (0x216D, "M", "c"),
+ (0x216E, "M", "d"),
+ (0x216F, "M", "m"),
+ (0x2170, "M", "i"),
+ (0x2171, "M", "ii"),
+ (0x2172, "M", "iii"),
+ (0x2173, "M", "iv"),
+ (0x2174, "M", "v"),
+ (0x2175, "M", "vi"),
+ (0x2176, "M", "vii"),
+ (0x2177, "M", "viii"),
+ (0x2178, "M", "ix"),
+ (0x2179, "M", "x"),
+ (0x217A, "M", "xi"),
+ (0x217B, "M", "xii"),
+ (0x217C, "M", "l"),
+ (0x217D, "M", "c"),
+ (0x217E, "M", "d"),
+ (0x217F, "M", "m"),
+ (0x2180, "V"),
+ (0x2183, "M", "ↄ"),
+ (0x2184, "V"),
+ (0x2189, "M", "0⁄3"),
+ (0x218A, "V"),
+ (0x218C, "X"),
+ (0x2190, "V"),
+ (0x222C, "M", "∫∫"),
+ (0x222D, "M", "∫∫∫"),
+ (0x222E, "V"),
+ (0x222F, "M", "∮∮"),
+ (0x2230, "M", "∮∮∮"),
+ (0x2231, "V"),
+ (0x2329, "M", "〈"),
+ (0x232A, "M", "〉"),
+ (0x232B, "V"),
+ (0x242A, "X"),
+ (0x2440, "V"),
+ (0x244B, "X"),
+ (0x2460, "M", "1"),
+ (0x2461, "M", "2"),
+ (0x2462, "M", "3"),
+ (0x2463, "M", "4"),
+ (0x2464, "M", "5"),
+ (0x2465, "M", "6"),
+ (0x2466, "M", "7"),
+ (0x2467, "M", "8"),
+ (0x2468, "M", "9"),
+ (0x2469, "M", "10"),
+ (0x246A, "M", "11"),
+ (0x246B, "M", "12"),
+ (0x246C, "M", "13"),
+ (0x246D, "M", "14"),
+ (0x246E, "M", "15"),
+ (0x246F, "M", "16"),
+ (0x2470, "M", "17"),
+ (0x2471, "M", "18"),
+ (0x2472, "M", "19"),
+ (0x2473, "M", "20"),
+ (0x2474, "M", "(1)"),
+ (0x2475, "M", "(2)"),
+ (0x2476, "M", "(3)"),
+ (0x2477, "M", "(4)"),
+ (0x2478, "M", "(5)"),
+ (0x2479, "M", "(6)"),
+ (0x247A, "M", "(7)"),
+ (0x247B, "M", "(8)"),
+ (0x247C, "M", "(9)"),
+ (0x247D, "M", "(10)"),
+ (0x247E, "M", "(11)"),
+ (0x247F, "M", "(12)"),
+ (0x2480, "M", "(13)"),
+ (0x2481, "M", "(14)"),
+ (0x2482, "M", "(15)"),
+ (0x2483, "M", "(16)"),
+ (0x2484, "M", "(17)"),
+ (0x2485, "M", "(18)"),
+ (0x2486, "M", "(19)"),
+ (0x2487, "M", "(20)"),
+ (0x2488, "X"),
+ (0x249C, "M", "(a)"),
+ (0x249D, "M", "(b)"),
+ (0x249E, "M", "(c)"),
+ (0x249F, "M", "(d)"),
+ ]
+
+
+def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x24A0, "M", "(e)"),
+ (0x24A1, "M", "(f)"),
+ (0x24A2, "M", "(g)"),
+ (0x24A3, "M", "(h)"),
+ (0x24A4, "M", "(i)"),
+ (0x24A5, "M", "(j)"),
+ (0x24A6, "M", "(k)"),
+ (0x24A7, "M", "(l)"),
+ (0x24A8, "M", "(m)"),
+ (0x24A9, "M", "(n)"),
+ (0x24AA, "M", "(o)"),
+ (0x24AB, "M", "(p)"),
+ (0x24AC, "M", "(q)"),
+ (0x24AD, "M", "(r)"),
+ (0x24AE, "M", "(s)"),
+ (0x24AF, "M", "(t)"),
+ (0x24B0, "M", "(u)"),
+ (0x24B1, "M", "(v)"),
+ (0x24B2, "M", "(w)"),
+ (0x24B3, "M", "(x)"),
+ (0x24B4, "M", "(y)"),
+ (0x24B5, "M", "(z)"),
+ (0x24B6, "M", "a"),
+ (0x24B7, "M", "b"),
+ (0x24B8, "M", "c"),
+ (0x24B9, "M", "d"),
+ (0x24BA, "M", "e"),
+ (0x24BB, "M", "f"),
+ (0x24BC, "M", "g"),
+ (0x24BD, "M", "h"),
+ (0x24BE, "M", "i"),
+ (0x24BF, "M", "j"),
+ (0x24C0, "M", "k"),
+ (0x24C1, "M", "l"),
+ (0x24C2, "M", "m"),
+ (0x24C3, "M", "n"),
+ (0x24C4, "M", "o"),
+ (0x24C5, "M", "p"),
+ (0x24C6, "M", "q"),
+ (0x24C7, "M", "r"),
+ (0x24C8, "M", "s"),
+ (0x24C9, "M", "t"),
+ (0x24CA, "M", "u"),
+ (0x24CB, "M", "v"),
+ (0x24CC, "M", "w"),
+ (0x24CD, "M", "x"),
+ (0x24CE, "M", "y"),
+ (0x24CF, "M", "z"),
+ (0x24D0, "M", "a"),
+ (0x24D1, "M", "b"),
+ (0x24D2, "M", "c"),
+ (0x24D3, "M", "d"),
+ (0x24D4, "M", "e"),
+ (0x24D5, "M", "f"),
+ (0x24D6, "M", "g"),
+ (0x24D7, "M", "h"),
+ (0x24D8, "M", "i"),
+ (0x24D9, "M", "j"),
+ (0x24DA, "M", "k"),
+ (0x24DB, "M", "l"),
+ (0x24DC, "M", "m"),
+ (0x24DD, "M", "n"),
+ (0x24DE, "M", "o"),
+ (0x24DF, "M", "p"),
+ (0x24E0, "M", "q"),
+ (0x24E1, "M", "r"),
+ (0x24E2, "M", "s"),
+ (0x24E3, "M", "t"),
+ (0x24E4, "M", "u"),
+ (0x24E5, "M", "v"),
+ (0x24E6, "M", "w"),
+ (0x24E7, "M", "x"),
+ (0x24E8, "M", "y"),
+ (0x24E9, "M", "z"),
+ (0x24EA, "M", "0"),
+ (0x24EB, "V"),
+ (0x2A0C, "M", "∫∫∫∫"),
+ (0x2A0D, "V"),
+ (0x2A74, "M", "::="),
+ (0x2A75, "M", "=="),
+ (0x2A76, "M", "==="),
+ (0x2A77, "V"),
+ (0x2ADC, "M", "⫝̸"),
+ (0x2ADD, "V"),
+ (0x2B74, "X"),
+ (0x2B76, "V"),
+ (0x2B96, "X"),
+ (0x2B97, "V"),
+ (0x2C00, "M", "ⰰ"),
+ (0x2C01, "M", "ⰱ"),
+ (0x2C02, "M", "ⰲ"),
+ (0x2C03, "M", "ⰳ"),
+ (0x2C04, "M", "ⰴ"),
+ (0x2C05, "M", "ⰵ"),
+ (0x2C06, "M", "ⰶ"),
+ (0x2C07, "M", "ⰷ"),
+ (0x2C08, "M", "ⰸ"),
+ (0x2C09, "M", "ⰹ"),
+ (0x2C0A, "M", "ⰺ"),
+ (0x2C0B, "M", "ⰻ"),
+ ]
+
+
+def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2C0C, "M", "ⰼ"),
+ (0x2C0D, "M", "ⰽ"),
+ (0x2C0E, "M", "ⰾ"),
+ (0x2C0F, "M", "ⰿ"),
+ (0x2C10, "M", "ⱀ"),
+ (0x2C11, "M", "ⱁ"),
+ (0x2C12, "M", "ⱂ"),
+ (0x2C13, "M", "ⱃ"),
+ (0x2C14, "M", "ⱄ"),
+ (0x2C15, "M", "ⱅ"),
+ (0x2C16, "M", "ⱆ"),
+ (0x2C17, "M", "ⱇ"),
+ (0x2C18, "M", "ⱈ"),
+ (0x2C19, "M", "ⱉ"),
+ (0x2C1A, "M", "ⱊ"),
+ (0x2C1B, "M", "ⱋ"),
+ (0x2C1C, "M", "ⱌ"),
+ (0x2C1D, "M", "ⱍ"),
+ (0x2C1E, "M", "ⱎ"),
+ (0x2C1F, "M", "ⱏ"),
+ (0x2C20, "M", "ⱐ"),
+ (0x2C21, "M", "ⱑ"),
+ (0x2C22, "M", "ⱒ"),
+ (0x2C23, "M", "ⱓ"),
+ (0x2C24, "M", "ⱔ"),
+ (0x2C25, "M", "ⱕ"),
+ (0x2C26, "M", "ⱖ"),
+ (0x2C27, "M", "ⱗ"),
+ (0x2C28, "M", "ⱘ"),
+ (0x2C29, "M", "ⱙ"),
+ (0x2C2A, "M", "ⱚ"),
+ (0x2C2B, "M", "ⱛ"),
+ (0x2C2C, "M", "ⱜ"),
+ (0x2C2D, "M", "ⱝ"),
+ (0x2C2E, "M", "ⱞ"),
+ (0x2C2F, "M", "ⱟ"),
+ (0x2C30, "V"),
+ (0x2C60, "M", "ⱡ"),
+ (0x2C61, "V"),
+ (0x2C62, "M", "ɫ"),
+ (0x2C63, "M", "ᵽ"),
+ (0x2C64, "M", "ɽ"),
+ (0x2C65, "V"),
+ (0x2C67, "M", "ⱨ"),
+ (0x2C68, "V"),
+ (0x2C69, "M", "ⱪ"),
+ (0x2C6A, "V"),
+ (0x2C6B, "M", "ⱬ"),
+ (0x2C6C, "V"),
+ (0x2C6D, "M", "ɑ"),
+ (0x2C6E, "M", "ɱ"),
+ (0x2C6F, "M", "ɐ"),
+ (0x2C70, "M", "ɒ"),
+ (0x2C71, "V"),
+ (0x2C72, "M", "ⱳ"),
+ (0x2C73, "V"),
+ (0x2C75, "M", "ⱶ"),
+ (0x2C76, "V"),
+ (0x2C7C, "M", "j"),
+ (0x2C7D, "M", "v"),
+ (0x2C7E, "M", "ȿ"),
+ (0x2C7F, "M", "ɀ"),
+ (0x2C80, "M", "ⲁ"),
+ (0x2C81, "V"),
+ (0x2C82, "M", "ⲃ"),
+ (0x2C83, "V"),
+ (0x2C84, "M", "ⲅ"),
+ (0x2C85, "V"),
+ (0x2C86, "M", "ⲇ"),
+ (0x2C87, "V"),
+ (0x2C88, "M", "ⲉ"),
+ (0x2C89, "V"),
+ (0x2C8A, "M", "ⲋ"),
+ (0x2C8B, "V"),
+ (0x2C8C, "M", "ⲍ"),
+ (0x2C8D, "V"),
+ (0x2C8E, "M", "ⲏ"),
+ (0x2C8F, "V"),
+ (0x2C90, "M", "ⲑ"),
+ (0x2C91, "V"),
+ (0x2C92, "M", "ⲓ"),
+ (0x2C93, "V"),
+ (0x2C94, "M", "ⲕ"),
+ (0x2C95, "V"),
+ (0x2C96, "M", "ⲗ"),
+ (0x2C97, "V"),
+ (0x2C98, "M", "ⲙ"),
+ (0x2C99, "V"),
+ (0x2C9A, "M", "ⲛ"),
+ (0x2C9B, "V"),
+ (0x2C9C, "M", "ⲝ"),
+ (0x2C9D, "V"),
+ (0x2C9E, "M", "ⲟ"),
+ (0x2C9F, "V"),
+ (0x2CA0, "M", "ⲡ"),
+ (0x2CA1, "V"),
+ (0x2CA2, "M", "ⲣ"),
+ (0x2CA3, "V"),
+ (0x2CA4, "M", "ⲥ"),
+ (0x2CA5, "V"),
+ ]
+
+
+def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2CA6, "M", "ⲧ"),
+ (0x2CA7, "V"),
+ (0x2CA8, "M", "ⲩ"),
+ (0x2CA9, "V"),
+ (0x2CAA, "M", "ⲫ"),
+ (0x2CAB, "V"),
+ (0x2CAC, "M", "ⲭ"),
+ (0x2CAD, "V"),
+ (0x2CAE, "M", "ⲯ"),
+ (0x2CAF, "V"),
+ (0x2CB0, "M", "ⲱ"),
+ (0x2CB1, "V"),
+ (0x2CB2, "M", "ⲳ"),
+ (0x2CB3, "V"),
+ (0x2CB4, "M", "ⲵ"),
+ (0x2CB5, "V"),
+ (0x2CB6, "M", "ⲷ"),
+ (0x2CB7, "V"),
+ (0x2CB8, "M", "ⲹ"),
+ (0x2CB9, "V"),
+ (0x2CBA, "M", "ⲻ"),
+ (0x2CBB, "V"),
+ (0x2CBC, "M", "ⲽ"),
+ (0x2CBD, "V"),
+ (0x2CBE, "M", "ⲿ"),
+ (0x2CBF, "V"),
+ (0x2CC0, "M", "ⳁ"),
+ (0x2CC1, "V"),
+ (0x2CC2, "M", "ⳃ"),
+ (0x2CC3, "V"),
+ (0x2CC4, "M", "ⳅ"),
+ (0x2CC5, "V"),
+ (0x2CC6, "M", "ⳇ"),
+ (0x2CC7, "V"),
+ (0x2CC8, "M", "ⳉ"),
+ (0x2CC9, "V"),
+ (0x2CCA, "M", "ⳋ"),
+ (0x2CCB, "V"),
+ (0x2CCC, "M", "ⳍ"),
+ (0x2CCD, "V"),
+ (0x2CCE, "M", "ⳏ"),
+ (0x2CCF, "V"),
+ (0x2CD0, "M", "ⳑ"),
+ (0x2CD1, "V"),
+ (0x2CD2, "M", "ⳓ"),
+ (0x2CD3, "V"),
+ (0x2CD4, "M", "ⳕ"),
+ (0x2CD5, "V"),
+ (0x2CD6, "M", "ⳗ"),
+ (0x2CD7, "V"),
+ (0x2CD8, "M", "ⳙ"),
+ (0x2CD9, "V"),
+ (0x2CDA, "M", "ⳛ"),
+ (0x2CDB, "V"),
+ (0x2CDC, "M", "ⳝ"),
+ (0x2CDD, "V"),
+ (0x2CDE, "M", "ⳟ"),
+ (0x2CDF, "V"),
+ (0x2CE0, "M", "ⳡ"),
+ (0x2CE1, "V"),
+ (0x2CE2, "M", "ⳣ"),
+ (0x2CE3, "V"),
+ (0x2CEB, "M", "ⳬ"),
+ (0x2CEC, "V"),
+ (0x2CED, "M", "ⳮ"),
+ (0x2CEE, "V"),
+ (0x2CF2, "M", "ⳳ"),
+ (0x2CF3, "V"),
+ (0x2CF4, "X"),
+ (0x2CF9, "V"),
+ (0x2D26, "X"),
+ (0x2D27, "V"),
+ (0x2D28, "X"),
+ (0x2D2D, "V"),
+ (0x2D2E, "X"),
+ (0x2D30, "V"),
+ (0x2D68, "X"),
+ (0x2D6F, "M", "ⵡ"),
+ (0x2D70, "V"),
+ (0x2D71, "X"),
+ (0x2D7F, "V"),
+ (0x2D97, "X"),
+ (0x2DA0, "V"),
+ (0x2DA7, "X"),
+ (0x2DA8, "V"),
+ (0x2DAF, "X"),
+ (0x2DB0, "V"),
+ (0x2DB7, "X"),
+ (0x2DB8, "V"),
+ (0x2DBF, "X"),
+ (0x2DC0, "V"),
+ (0x2DC7, "X"),
+ (0x2DC8, "V"),
+ (0x2DCF, "X"),
+ (0x2DD0, "V"),
+ (0x2DD7, "X"),
+ (0x2DD8, "V"),
+ (0x2DDF, "X"),
+ (0x2DE0, "V"),
+ (0x2E5E, "X"),
+ ]
+
+
+def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2E80, "V"),
+ (0x2E9A, "X"),
+ (0x2E9B, "V"),
+ (0x2E9F, "M", "母"),
+ (0x2EA0, "V"),
+ (0x2EF3, "M", "龟"),
+ (0x2EF4, "X"),
+ (0x2F00, "M", "一"),
+ (0x2F01, "M", "丨"),
+ (0x2F02, "M", "丶"),
+ (0x2F03, "M", "丿"),
+ (0x2F04, "M", "乙"),
+ (0x2F05, "M", "亅"),
+ (0x2F06, "M", "二"),
+ (0x2F07, "M", "亠"),
+ (0x2F08, "M", "人"),
+ (0x2F09, "M", "儿"),
+ (0x2F0A, "M", "入"),
+ (0x2F0B, "M", "八"),
+ (0x2F0C, "M", "冂"),
+ (0x2F0D, "M", "冖"),
+ (0x2F0E, "M", "冫"),
+ (0x2F0F, "M", "几"),
+ (0x2F10, "M", "凵"),
+ (0x2F11, "M", "刀"),
+ (0x2F12, "M", "力"),
+ (0x2F13, "M", "勹"),
+ (0x2F14, "M", "匕"),
+ (0x2F15, "M", "匚"),
+ (0x2F16, "M", "匸"),
+ (0x2F17, "M", "十"),
+ (0x2F18, "M", "卜"),
+ (0x2F19, "M", "卩"),
+ (0x2F1A, "M", "厂"),
+ (0x2F1B, "M", "厶"),
+ (0x2F1C, "M", "又"),
+ (0x2F1D, "M", "口"),
+ (0x2F1E, "M", "囗"),
+ (0x2F1F, "M", "土"),
+ (0x2F20, "M", "士"),
+ (0x2F21, "M", "夂"),
+ (0x2F22, "M", "夊"),
+ (0x2F23, "M", "夕"),
+ (0x2F24, "M", "大"),
+ (0x2F25, "M", "女"),
+ (0x2F26, "M", "子"),
+ (0x2F27, "M", "宀"),
+ (0x2F28, "M", "寸"),
+ (0x2F29, "M", "小"),
+ (0x2F2A, "M", "尢"),
+ (0x2F2B, "M", "尸"),
+ (0x2F2C, "M", "屮"),
+ (0x2F2D, "M", "山"),
+ (0x2F2E, "M", "巛"),
+ (0x2F2F, "M", "工"),
+ (0x2F30, "M", "己"),
+ (0x2F31, "M", "巾"),
+ (0x2F32, "M", "干"),
+ (0x2F33, "M", "幺"),
+ (0x2F34, "M", "广"),
+ (0x2F35, "M", "廴"),
+ (0x2F36, "M", "廾"),
+ (0x2F37, "M", "弋"),
+ (0x2F38, "M", "弓"),
+ (0x2F39, "M", "彐"),
+ (0x2F3A, "M", "彡"),
+ (0x2F3B, "M", "彳"),
+ (0x2F3C, "M", "心"),
+ (0x2F3D, "M", "戈"),
+ (0x2F3E, "M", "戶"),
+ (0x2F3F, "M", "手"),
+ (0x2F40, "M", "支"),
+ (0x2F41, "M", "攴"),
+ (0x2F42, "M", "文"),
+ (0x2F43, "M", "斗"),
+ (0x2F44, "M", "斤"),
+ (0x2F45, "M", "方"),
+ (0x2F46, "M", "无"),
+ (0x2F47, "M", "日"),
+ (0x2F48, "M", "曰"),
+ (0x2F49, "M", "月"),
+ (0x2F4A, "M", "木"),
+ (0x2F4B, "M", "欠"),
+ (0x2F4C, "M", "止"),
+ (0x2F4D, "M", "歹"),
+ (0x2F4E, "M", "殳"),
+ (0x2F4F, "M", "毋"),
+ (0x2F50, "M", "比"),
+ (0x2F51, "M", "毛"),
+ (0x2F52, "M", "氏"),
+ (0x2F53, "M", "气"),
+ (0x2F54, "M", "水"),
+ (0x2F55, "M", "火"),
+ (0x2F56, "M", "爪"),
+ (0x2F57, "M", "父"),
+ (0x2F58, "M", "爻"),
+ (0x2F59, "M", "爿"),
+ (0x2F5A, "M", "片"),
+ (0x2F5B, "M", "牙"),
+ (0x2F5C, "M", "牛"),
+ ]
+
+
+def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F5D, "M", "犬"),
+ (0x2F5E, "M", "玄"),
+ (0x2F5F, "M", "玉"),
+ (0x2F60, "M", "瓜"),
+ (0x2F61, "M", "瓦"),
+ (0x2F62, "M", "甘"),
+ (0x2F63, "M", "生"),
+ (0x2F64, "M", "用"),
+ (0x2F65, "M", "田"),
+ (0x2F66, "M", "疋"),
+ (0x2F67, "M", "疒"),
+ (0x2F68, "M", "癶"),
+ (0x2F69, "M", "白"),
+ (0x2F6A, "M", "皮"),
+ (0x2F6B, "M", "皿"),
+ (0x2F6C, "M", "目"),
+ (0x2F6D, "M", "矛"),
+ (0x2F6E, "M", "矢"),
+ (0x2F6F, "M", "石"),
+ (0x2F70, "M", "示"),
+ (0x2F71, "M", "禸"),
+ (0x2F72, "M", "禾"),
+ (0x2F73, "M", "穴"),
+ (0x2F74, "M", "立"),
+ (0x2F75, "M", "竹"),
+ (0x2F76, "M", "米"),
+ (0x2F77, "M", "糸"),
+ (0x2F78, "M", "缶"),
+ (0x2F79, "M", "网"),
+ (0x2F7A, "M", "羊"),
+ (0x2F7B, "M", "羽"),
+ (0x2F7C, "M", "老"),
+ (0x2F7D, "M", "而"),
+ (0x2F7E, "M", "耒"),
+ (0x2F7F, "M", "耳"),
+ (0x2F80, "M", "聿"),
+ (0x2F81, "M", "肉"),
+ (0x2F82, "M", "臣"),
+ (0x2F83, "M", "自"),
+ (0x2F84, "M", "至"),
+ (0x2F85, "M", "臼"),
+ (0x2F86, "M", "舌"),
+ (0x2F87, "M", "舛"),
+ (0x2F88, "M", "舟"),
+ (0x2F89, "M", "艮"),
+ (0x2F8A, "M", "色"),
+ (0x2F8B, "M", "艸"),
+ (0x2F8C, "M", "虍"),
+ (0x2F8D, "M", "虫"),
+ (0x2F8E, "M", "血"),
+ (0x2F8F, "M", "行"),
+ (0x2F90, "M", "衣"),
+ (0x2F91, "M", "襾"),
+ (0x2F92, "M", "見"),
+ (0x2F93, "M", "角"),
+ (0x2F94, "M", "言"),
+ (0x2F95, "M", "谷"),
+ (0x2F96, "M", "豆"),
+ (0x2F97, "M", "豕"),
+ (0x2F98, "M", "豸"),
+ (0x2F99, "M", "貝"),
+ (0x2F9A, "M", "赤"),
+ (0x2F9B, "M", "走"),
+ (0x2F9C, "M", "足"),
+ (0x2F9D, "M", "身"),
+ (0x2F9E, "M", "車"),
+ (0x2F9F, "M", "辛"),
+ (0x2FA0, "M", "辰"),
+ (0x2FA1, "M", "辵"),
+ (0x2FA2, "M", "邑"),
+ (0x2FA3, "M", "酉"),
+ (0x2FA4, "M", "釆"),
+ (0x2FA5, "M", "里"),
+ (0x2FA6, "M", "金"),
+ (0x2FA7, "M", "長"),
+ (0x2FA8, "M", "門"),
+ (0x2FA9, "M", "阜"),
+ (0x2FAA, "M", "隶"),
+ (0x2FAB, "M", "隹"),
+ (0x2FAC, "M", "雨"),
+ (0x2FAD, "M", "靑"),
+ (0x2FAE, "M", "非"),
+ (0x2FAF, "M", "面"),
+ (0x2FB0, "M", "革"),
+ (0x2FB1, "M", "韋"),
+ (0x2FB2, "M", "韭"),
+ (0x2FB3, "M", "音"),
+ (0x2FB4, "M", "頁"),
+ (0x2FB5, "M", "風"),
+ (0x2FB6, "M", "飛"),
+ (0x2FB7, "M", "食"),
+ (0x2FB8, "M", "首"),
+ (0x2FB9, "M", "香"),
+ (0x2FBA, "M", "馬"),
+ (0x2FBB, "M", "骨"),
+ (0x2FBC, "M", "高"),
+ (0x2FBD, "M", "髟"),
+ (0x2FBE, "M", "鬥"),
+ (0x2FBF, "M", "鬯"),
+ (0x2FC0, "M", "鬲"),
+ ]
+
+
+def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2FC1, "M", "鬼"),
+ (0x2FC2, "M", "魚"),
+ (0x2FC3, "M", "鳥"),
+ (0x2FC4, "M", "鹵"),
+ (0x2FC5, "M", "鹿"),
+ (0x2FC6, "M", "麥"),
+ (0x2FC7, "M", "麻"),
+ (0x2FC8, "M", "黃"),
+ (0x2FC9, "M", "黍"),
+ (0x2FCA, "M", "黑"),
+ (0x2FCB, "M", "黹"),
+ (0x2FCC, "M", "黽"),
+ (0x2FCD, "M", "鼎"),
+ (0x2FCE, "M", "鼓"),
+ (0x2FCF, "M", "鼠"),
+ (0x2FD0, "M", "鼻"),
+ (0x2FD1, "M", "齊"),
+ (0x2FD2, "M", "齒"),
+ (0x2FD3, "M", "龍"),
+ (0x2FD4, "M", "龜"),
+ (0x2FD5, "M", "龠"),
+ (0x2FD6, "X"),
+ (0x3000, "M", " "),
+ (0x3001, "V"),
+ (0x3002, "M", "."),
+ (0x3003, "V"),
+ (0x3036, "M", "〒"),
+ (0x3037, "V"),
+ (0x3038, "M", "十"),
+ (0x3039, "M", "卄"),
+ (0x303A, "M", "卅"),
+ (0x303B, "V"),
+ (0x3040, "X"),
+ (0x3041, "V"),
+ (0x3097, "X"),
+ (0x3099, "V"),
+ (0x309B, "M", " ゙"),
+ (0x309C, "M", " ゚"),
+ (0x309D, "V"),
+ (0x309F, "M", "より"),
+ (0x30A0, "V"),
+ (0x30FF, "M", "コト"),
+ (0x3100, "X"),
+ (0x3105, "V"),
+ (0x3130, "X"),
+ (0x3131, "M", "ᄀ"),
+ (0x3132, "M", "ᄁ"),
+ (0x3133, "M", "ᆪ"),
+ (0x3134, "M", "ᄂ"),
+ (0x3135, "M", "ᆬ"),
+ (0x3136, "M", "ᆭ"),
+ (0x3137, "M", "ᄃ"),
+ (0x3138, "M", "ᄄ"),
+ (0x3139, "M", "ᄅ"),
+ (0x313A, "M", "ᆰ"),
+ (0x313B, "M", "ᆱ"),
+ (0x313C, "M", "ᆲ"),
+ (0x313D, "M", "ᆳ"),
+ (0x313E, "M", "ᆴ"),
+ (0x313F, "M", "ᆵ"),
+ (0x3140, "M", "ᄚ"),
+ (0x3141, "M", "ᄆ"),
+ (0x3142, "M", "ᄇ"),
+ (0x3143, "M", "ᄈ"),
+ (0x3144, "M", "ᄡ"),
+ (0x3145, "M", "ᄉ"),
+ (0x3146, "M", "ᄊ"),
+ (0x3147, "M", "ᄋ"),
+ (0x3148, "M", "ᄌ"),
+ (0x3149, "M", "ᄍ"),
+ (0x314A, "M", "ᄎ"),
+ (0x314B, "M", "ᄏ"),
+ (0x314C, "M", "ᄐ"),
+ (0x314D, "M", "ᄑ"),
+ (0x314E, "M", "ᄒ"),
+ (0x314F, "M", "ᅡ"),
+ (0x3150, "M", "ᅢ"),
+ (0x3151, "M", "ᅣ"),
+ (0x3152, "M", "ᅤ"),
+ (0x3153, "M", "ᅥ"),
+ (0x3154, "M", "ᅦ"),
+ (0x3155, "M", "ᅧ"),
+ (0x3156, "M", "ᅨ"),
+ (0x3157, "M", "ᅩ"),
+ (0x3158, "M", "ᅪ"),
+ (0x3159, "M", "ᅫ"),
+ (0x315A, "M", "ᅬ"),
+ (0x315B, "M", "ᅭ"),
+ (0x315C, "M", "ᅮ"),
+ (0x315D, "M", "ᅯ"),
+ (0x315E, "M", "ᅰ"),
+ (0x315F, "M", "ᅱ"),
+ (0x3160, "M", "ᅲ"),
+ (0x3161, "M", "ᅳ"),
+ (0x3162, "M", "ᅴ"),
+ (0x3163, "M", "ᅵ"),
+ (0x3164, "I"),
+ (0x3165, "M", "ᄔ"),
+ (0x3166, "M", "ᄕ"),
+ (0x3167, "M", "ᇇ"),
+ ]
+
+
+def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3168, "M", "ᇈ"),
+ (0x3169, "M", "ᇌ"),
+ (0x316A, "M", "ᇎ"),
+ (0x316B, "M", "ᇓ"),
+ (0x316C, "M", "ᇗ"),
+ (0x316D, "M", "ᇙ"),
+ (0x316E, "M", "ᄜ"),
+ (0x316F, "M", "ᇝ"),
+ (0x3170, "M", "ᇟ"),
+ (0x3171, "M", "ᄝ"),
+ (0x3172, "M", "ᄞ"),
+ (0x3173, "M", "ᄠ"),
+ (0x3174, "M", "ᄢ"),
+ (0x3175, "M", "ᄣ"),
+ (0x3176, "M", "ᄧ"),
+ (0x3177, "M", "ᄩ"),
+ (0x3178, "M", "ᄫ"),
+ (0x3179, "M", "ᄬ"),
+ (0x317A, "M", "ᄭ"),
+ (0x317B, "M", "ᄮ"),
+ (0x317C, "M", "ᄯ"),
+ (0x317D, "M", "ᄲ"),
+ (0x317E, "M", "ᄶ"),
+ (0x317F, "M", "ᅀ"),
+ (0x3180, "M", "ᅇ"),
+ (0x3181, "M", "ᅌ"),
+ (0x3182, "M", "ᇱ"),
+ (0x3183, "M", "ᇲ"),
+ (0x3184, "M", "ᅗ"),
+ (0x3185, "M", "ᅘ"),
+ (0x3186, "M", "ᅙ"),
+ (0x3187, "M", "ᆄ"),
+ (0x3188, "M", "ᆅ"),
+ (0x3189, "M", "ᆈ"),
+ (0x318A, "M", "ᆑ"),
+ (0x318B, "M", "ᆒ"),
+ (0x318C, "M", "ᆔ"),
+ (0x318D, "M", "ᆞ"),
+ (0x318E, "M", "ᆡ"),
+ (0x318F, "X"),
+ (0x3190, "V"),
+ (0x3192, "M", "一"),
+ (0x3193, "M", "二"),
+ (0x3194, "M", "三"),
+ (0x3195, "M", "四"),
+ (0x3196, "M", "上"),
+ (0x3197, "M", "中"),
+ (0x3198, "M", "下"),
+ (0x3199, "M", "甲"),
+ (0x319A, "M", "乙"),
+ (0x319B, "M", "丙"),
+ (0x319C, "M", "丁"),
+ (0x319D, "M", "天"),
+ (0x319E, "M", "地"),
+ (0x319F, "M", "人"),
+ (0x31A0, "V"),
+ (0x31E6, "X"),
+ (0x31F0, "V"),
+ (0x3200, "M", "(ᄀ)"),
+ (0x3201, "M", "(ᄂ)"),
+ (0x3202, "M", "(ᄃ)"),
+ (0x3203, "M", "(ᄅ)"),
+ (0x3204, "M", "(ᄆ)"),
+ (0x3205, "M", "(ᄇ)"),
+ (0x3206, "M", "(ᄉ)"),
+ (0x3207, "M", "(ᄋ)"),
+ (0x3208, "M", "(ᄌ)"),
+ (0x3209, "M", "(ᄎ)"),
+ (0x320A, "M", "(ᄏ)"),
+ (0x320B, "M", "(ᄐ)"),
+ (0x320C, "M", "(ᄑ)"),
+ (0x320D, "M", "(ᄒ)"),
+ (0x320E, "M", "(가)"),
+ (0x320F, "M", "(나)"),
+ (0x3210, "M", "(다)"),
+ (0x3211, "M", "(라)"),
+ (0x3212, "M", "(마)"),
+ (0x3213, "M", "(바)"),
+ (0x3214, "M", "(사)"),
+ (0x3215, "M", "(아)"),
+ (0x3216, "M", "(자)"),
+ (0x3217, "M", "(차)"),
+ (0x3218, "M", "(카)"),
+ (0x3219, "M", "(타)"),
+ (0x321A, "M", "(파)"),
+ (0x321B, "M", "(하)"),
+ (0x321C, "M", "(주)"),
+ (0x321D, "M", "(오전)"),
+ (0x321E, "M", "(오후)"),
+ (0x321F, "X"),
+ (0x3220, "M", "(一)"),
+ (0x3221, "M", "(二)"),
+ (0x3222, "M", "(三)"),
+ (0x3223, "M", "(四)"),
+ (0x3224, "M", "(五)"),
+ (0x3225, "M", "(六)"),
+ (0x3226, "M", "(七)"),
+ (0x3227, "M", "(八)"),
+ (0x3228, "M", "(九)"),
+ (0x3229, "M", "(十)"),
+ ]
+
+
+def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x322A, "M", "(月)"),
+ (0x322B, "M", "(火)"),
+ (0x322C, "M", "(水)"),
+ (0x322D, "M", "(木)"),
+ (0x322E, "M", "(金)"),
+ (0x322F, "M", "(土)"),
+ (0x3230, "M", "(日)"),
+ (0x3231, "M", "(株)"),
+ (0x3232, "M", "(有)"),
+ (0x3233, "M", "(社)"),
+ (0x3234, "M", "(名)"),
+ (0x3235, "M", "(特)"),
+ (0x3236, "M", "(財)"),
+ (0x3237, "M", "(祝)"),
+ (0x3238, "M", "(労)"),
+ (0x3239, "M", "(代)"),
+ (0x323A, "M", "(呼)"),
+ (0x323B, "M", "(学)"),
+ (0x323C, "M", "(監)"),
+ (0x323D, "M", "(企)"),
+ (0x323E, "M", "(資)"),
+ (0x323F, "M", "(協)"),
+ (0x3240, "M", "(祭)"),
+ (0x3241, "M", "(休)"),
+ (0x3242, "M", "(自)"),
+ (0x3243, "M", "(至)"),
+ (0x3244, "M", "問"),
+ (0x3245, "M", "幼"),
+ (0x3246, "M", "文"),
+ (0x3247, "M", "箏"),
+ (0x3248, "V"),
+ (0x3250, "M", "pte"),
+ (0x3251, "M", "21"),
+ (0x3252, "M", "22"),
+ (0x3253, "M", "23"),
+ (0x3254, "M", "24"),
+ (0x3255, "M", "25"),
+ (0x3256, "M", "26"),
+ (0x3257, "M", "27"),
+ (0x3258, "M", "28"),
+ (0x3259, "M", "29"),
+ (0x325A, "M", "30"),
+ (0x325B, "M", "31"),
+ (0x325C, "M", "32"),
+ (0x325D, "M", "33"),
+ (0x325E, "M", "34"),
+ (0x325F, "M", "35"),
+ (0x3260, "M", "ᄀ"),
+ (0x3261, "M", "ᄂ"),
+ (0x3262, "M", "ᄃ"),
+ (0x3263, "M", "ᄅ"),
+ (0x3264, "M", "ᄆ"),
+ (0x3265, "M", "ᄇ"),
+ (0x3266, "M", "ᄉ"),
+ (0x3267, "M", "ᄋ"),
+ (0x3268, "M", "ᄌ"),
+ (0x3269, "M", "ᄎ"),
+ (0x326A, "M", "ᄏ"),
+ (0x326B, "M", "ᄐ"),
+ (0x326C, "M", "ᄑ"),
+ (0x326D, "M", "ᄒ"),
+ (0x326E, "M", "가"),
+ (0x326F, "M", "나"),
+ (0x3270, "M", "다"),
+ (0x3271, "M", "라"),
+ (0x3272, "M", "마"),
+ (0x3273, "M", "바"),
+ (0x3274, "M", "사"),
+ (0x3275, "M", "아"),
+ (0x3276, "M", "자"),
+ (0x3277, "M", "차"),
+ (0x3278, "M", "카"),
+ (0x3279, "M", "타"),
+ (0x327A, "M", "파"),
+ (0x327B, "M", "하"),
+ (0x327C, "M", "참고"),
+ (0x327D, "M", "주의"),
+ (0x327E, "M", "우"),
+ (0x327F, "V"),
+ (0x3280, "M", "一"),
+ (0x3281, "M", "二"),
+ (0x3282, "M", "三"),
+ (0x3283, "M", "四"),
+ (0x3284, "M", "五"),
+ (0x3285, "M", "六"),
+ (0x3286, "M", "七"),
+ (0x3287, "M", "八"),
+ (0x3288, "M", "九"),
+ (0x3289, "M", "十"),
+ (0x328A, "M", "月"),
+ (0x328B, "M", "火"),
+ (0x328C, "M", "水"),
+ (0x328D, "M", "木"),
+ (0x328E, "M", "金"),
+ (0x328F, "M", "土"),
+ (0x3290, "M", "日"),
+ (0x3291, "M", "株"),
+ (0x3292, "M", "有"),
+ (0x3293, "M", "社"),
+ (0x3294, "M", "名"),
+ ]
+
+
+def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x3295, "M", "特"),
+ (0x3296, "M", "財"),
+ (0x3297, "M", "祝"),
+ (0x3298, "M", "労"),
+ (0x3299, "M", "秘"),
+ (0x329A, "M", "男"),
+ (0x329B, "M", "女"),
+ (0x329C, "M", "適"),
+ (0x329D, "M", "優"),
+ (0x329E, "M", "印"),
+ (0x329F, "M", "注"),
+ (0x32A0, "M", "項"),
+ (0x32A1, "M", "休"),
+ (0x32A2, "M", "写"),
+ (0x32A3, "M", "正"),
+ (0x32A4, "M", "上"),
+ (0x32A5, "M", "中"),
+ (0x32A6, "M", "下"),
+ (0x32A7, "M", "左"),
+ (0x32A8, "M", "右"),
+ (0x32A9, "M", "医"),
+ (0x32AA, "M", "宗"),
+ (0x32AB, "M", "学"),
+ (0x32AC, "M", "監"),
+ (0x32AD, "M", "企"),
+ (0x32AE, "M", "資"),
+ (0x32AF, "M", "協"),
+ (0x32B0, "M", "夜"),
+ (0x32B1, "M", "36"),
+ (0x32B2, "M", "37"),
+ (0x32B3, "M", "38"),
+ (0x32B4, "M", "39"),
+ (0x32B5, "M", "40"),
+ (0x32B6, "M", "41"),
+ (0x32B7, "M", "42"),
+ (0x32B8, "M", "43"),
+ (0x32B9, "M", "44"),
+ (0x32BA, "M", "45"),
+ (0x32BB, "M", "46"),
+ (0x32BC, "M", "47"),
+ (0x32BD, "M", "48"),
+ (0x32BE, "M", "49"),
+ (0x32BF, "M", "50"),
+ (0x32C0, "M", "1月"),
+ (0x32C1, "M", "2月"),
+ (0x32C2, "M", "3月"),
+ (0x32C3, "M", "4月"),
+ (0x32C4, "M", "5月"),
+ (0x32C5, "M", "6月"),
+ (0x32C6, "M", "7月"),
+ (0x32C7, "M", "8月"),
+ (0x32C8, "M", "9月"),
+ (0x32C9, "M", "10月"),
+ (0x32CA, "M", "11月"),
+ (0x32CB, "M", "12月"),
+ (0x32CC, "M", "hg"),
+ (0x32CD, "M", "erg"),
+ (0x32CE, "M", "ev"),
+ (0x32CF, "M", "ltd"),
+ (0x32D0, "M", "ア"),
+ (0x32D1, "M", "イ"),
+ (0x32D2, "M", "ウ"),
+ (0x32D3, "M", "エ"),
+ (0x32D4, "M", "オ"),
+ (0x32D5, "M", "カ"),
+ (0x32D6, "M", "キ"),
+ (0x32D7, "M", "ク"),
+ (0x32D8, "M", "ケ"),
+ (0x32D9, "M", "コ"),
+ (0x32DA, "M", "サ"),
+ (0x32DB, "M", "シ"),
+ (0x32DC, "M", "ス"),
+ (0x32DD, "M", "セ"),
+ (0x32DE, "M", "ソ"),
+ (0x32DF, "M", "タ"),
+ (0x32E0, "M", "チ"),
+ (0x32E1, "M", "ツ"),
+ (0x32E2, "M", "テ"),
+ (0x32E3, "M", "ト"),
+ (0x32E4, "M", "ナ"),
+ (0x32E5, "M", "ニ"),
+ (0x32E6, "M", "ヌ"),
+ (0x32E7, "M", "ネ"),
+ (0x32E8, "M", "ノ"),
+ (0x32E9, "M", "ハ"),
+ (0x32EA, "M", "ヒ"),
+ (0x32EB, "M", "フ"),
+ (0x32EC, "M", "ヘ"),
+ (0x32ED, "M", "ホ"),
+ (0x32EE, "M", "マ"),
+ (0x32EF, "M", "ミ"),
+ (0x32F0, "M", "ム"),
+ (0x32F1, "M", "メ"),
+ (0x32F2, "M", "モ"),
+ (0x32F3, "M", "ヤ"),
+ (0x32F4, "M", "ユ"),
+ (0x32F5, "M", "ヨ"),
+ (0x32F6, "M", "ラ"),
+ (0x32F7, "M", "リ"),
+ (0x32F8, "M", "ル"),
+ ]
+
+
+def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x32F9, "M", "レ"),
+ (0x32FA, "M", "ロ"),
+ (0x32FB, "M", "ワ"),
+ (0x32FC, "M", "ヰ"),
+ (0x32FD, "M", "ヱ"),
+ (0x32FE, "M", "ヲ"),
+ (0x32FF, "M", "令和"),
+ (0x3300, "M", "アパート"),
+ (0x3301, "M", "アルファ"),
+ (0x3302, "M", "アンペア"),
+ (0x3303, "M", "アール"),
+ (0x3304, "M", "イニング"),
+ (0x3305, "M", "インチ"),
+ (0x3306, "M", "ウォン"),
+ (0x3307, "M", "エスクード"),
+ (0x3308, "M", "エーカー"),
+ (0x3309, "M", "オンス"),
+ (0x330A, "M", "オーム"),
+ (0x330B, "M", "カイリ"),
+ (0x330C, "M", "カラット"),
+ (0x330D, "M", "カロリー"),
+ (0x330E, "M", "ガロン"),
+ (0x330F, "M", "ガンマ"),
+ (0x3310, "M", "ギガ"),
+ (0x3311, "M", "ギニー"),
+ (0x3312, "M", "キュリー"),
+ (0x3313, "M", "ギルダー"),
+ (0x3314, "M", "キロ"),
+ (0x3315, "M", "キログラム"),
+ (0x3316, "M", "キロメートル"),
+ (0x3317, "M", "キロワット"),
+ (0x3318, "M", "グラム"),
+ (0x3319, "M", "グラムトン"),
+ (0x331A, "M", "クルゼイロ"),
+ (0x331B, "M", "クローネ"),
+ (0x331C, "M", "ケース"),
+ (0x331D, "M", "コルナ"),
+ (0x331E, "M", "コーポ"),
+ (0x331F, "M", "サイクル"),
+ (0x3320, "M", "サンチーム"),
+ (0x3321, "M", "シリング"),
+ (0x3322, "M", "センチ"),
+ (0x3323, "M", "セント"),
+ (0x3324, "M", "ダース"),
+ (0x3325, "M", "デシ"),
+ (0x3326, "M", "ドル"),
+ (0x3327, "M", "トン"),
+ (0x3328, "M", "ナノ"),
+ (0x3329, "M", "ノット"),
+ (0x332A, "M", "ハイツ"),
+ (0x332B, "M", "パーセント"),
+ (0x332C, "M", "パーツ"),
+ (0x332D, "M", "バーレル"),
+ (0x332E, "M", "ピアストル"),
+ (0x332F, "M", "ピクル"),
+ (0x3330, "M", "ピコ"),
+ (0x3331, "M", "ビル"),
+ (0x3332, "M", "ファラッド"),
+ (0x3333, "M", "フィート"),
+ (0x3334, "M", "ブッシェル"),
+ (0x3335, "M", "フラン"),
+ (0x3336, "M", "ヘクタール"),
+ (0x3337, "M", "ペソ"),
+ (0x3338, "M", "ペニヒ"),
+ (0x3339, "M", "ヘルツ"),
+ (0x333A, "M", "ペンス"),
+ (0x333B, "M", "ページ"),
+ (0x333C, "M", "ベータ"),
+ (0x333D, "M", "ポイント"),
+ (0x333E, "M", "ボルト"),
+ (0x333F, "M", "ホン"),
+ (0x3340, "M", "ポンド"),
+ (0x3341, "M", "ホール"),
+ (0x3342, "M", "ホーン"),
+ (0x3343, "M", "マイクロ"),
+ (0x3344, "M", "マイル"),
+ (0x3345, "M", "マッハ"),
+ (0x3346, "M", "マルク"),
+ (0x3347, "M", "マンション"),
+ (0x3348, "M", "ミクロン"),
+ (0x3349, "M", "ミリ"),
+ (0x334A, "M", "ミリバール"),
+ (0x334B, "M", "メガ"),
+ (0x334C, "M", "メガトン"),
+ (0x334D, "M", "メートル"),
+ (0x334E, "M", "ヤード"),
+ (0x334F, "M", "ヤール"),
+ (0x3350, "M", "ユアン"),
+ (0x3351, "M", "リットル"),
+ (0x3352, "M", "リラ"),
+ (0x3353, "M", "ルピー"),
+ (0x3354, "M", "ルーブル"),
+ (0x3355, "M", "レム"),
+ (0x3356, "M", "レントゲン"),
+ (0x3357, "M", "ワット"),
+ (0x3358, "M", "0点"),
+ (0x3359, "M", "1点"),
+ (0x335A, "M", "2点"),
+ (0x335B, "M", "3点"),
+ (0x335C, "M", "4点"),
+ ]
+
+
+def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x335D, "M", "5点"),
+ (0x335E, "M", "6点"),
+ (0x335F, "M", "7点"),
+ (0x3360, "M", "8点"),
+ (0x3361, "M", "9点"),
+ (0x3362, "M", "10点"),
+ (0x3363, "M", "11点"),
+ (0x3364, "M", "12点"),
+ (0x3365, "M", "13点"),
+ (0x3366, "M", "14点"),
+ (0x3367, "M", "15点"),
+ (0x3368, "M", "16点"),
+ (0x3369, "M", "17点"),
+ (0x336A, "M", "18点"),
+ (0x336B, "M", "19点"),
+ (0x336C, "M", "20点"),
+ (0x336D, "M", "21点"),
+ (0x336E, "M", "22点"),
+ (0x336F, "M", "23点"),
+ (0x3370, "M", "24点"),
+ (0x3371, "M", "hpa"),
+ (0x3372, "M", "da"),
+ (0x3373, "M", "au"),
+ (0x3374, "M", "bar"),
+ (0x3375, "M", "ov"),
+ (0x3376, "M", "pc"),
+ (0x3377, "M", "dm"),
+ (0x3378, "M", "dm2"),
+ (0x3379, "M", "dm3"),
+ (0x337A, "M", "iu"),
+ (0x337B, "M", "平成"),
+ (0x337C, "M", "昭和"),
+ (0x337D, "M", "大正"),
+ (0x337E, "M", "明治"),
+ (0x337F, "M", "株式会社"),
+ (0x3380, "M", "pa"),
+ (0x3381, "M", "na"),
+ (0x3382, "M", "μa"),
+ (0x3383, "M", "ma"),
+ (0x3384, "M", "ka"),
+ (0x3385, "M", "kb"),
+ (0x3386, "M", "mb"),
+ (0x3387, "M", "gb"),
+ (0x3388, "M", "cal"),
+ (0x3389, "M", "kcal"),
+ (0x338A, "M", "pf"),
+ (0x338B, "M", "nf"),
+ (0x338C, "M", "μf"),
+ (0x338D, "M", "μg"),
+ (0x338E, "M", "mg"),
+ (0x338F, "M", "kg"),
+ (0x3390, "M", "hz"),
+ (0x3391, "M", "khz"),
+ (0x3392, "M", "mhz"),
+ (0x3393, "M", "ghz"),
+ (0x3394, "M", "thz"),
+ (0x3395, "M", "μl"),
+ (0x3396, "M", "ml"),
+ (0x3397, "M", "dl"),
+ (0x3398, "M", "kl"),
+ (0x3399, "M", "fm"),
+ (0x339A, "M", "nm"),
+ (0x339B, "M", "μm"),
+ (0x339C, "M", "mm"),
+ (0x339D, "M", "cm"),
+ (0x339E, "M", "km"),
+ (0x339F, "M", "mm2"),
+ (0x33A0, "M", "cm2"),
+ (0x33A1, "M", "m2"),
+ (0x33A2, "M", "km2"),
+ (0x33A3, "M", "mm3"),
+ (0x33A4, "M", "cm3"),
+ (0x33A5, "M", "m3"),
+ (0x33A6, "M", "km3"),
+ (0x33A7, "M", "m∕s"),
+ (0x33A8, "M", "m∕s2"),
+ (0x33A9, "M", "pa"),
+ (0x33AA, "M", "kpa"),
+ (0x33AB, "M", "mpa"),
+ (0x33AC, "M", "gpa"),
+ (0x33AD, "M", "rad"),
+ (0x33AE, "M", "rad∕s"),
+ (0x33AF, "M", "rad∕s2"),
+ (0x33B0, "M", "ps"),
+ (0x33B1, "M", "ns"),
+ (0x33B2, "M", "μs"),
+ (0x33B3, "M", "ms"),
+ (0x33B4, "M", "pv"),
+ (0x33B5, "M", "nv"),
+ (0x33B6, "M", "μv"),
+ (0x33B7, "M", "mv"),
+ (0x33B8, "M", "kv"),
+ (0x33B9, "M", "mv"),
+ (0x33BA, "M", "pw"),
+ (0x33BB, "M", "nw"),
+ (0x33BC, "M", "μw"),
+ (0x33BD, "M", "mw"),
+ (0x33BE, "M", "kw"),
+ (0x33BF, "M", "mw"),
+ (0x33C0, "M", "kω"),
+ ]
+
+
+def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x33C1, "M", "mω"),
+ (0x33C2, "X"),
+ (0x33C3, "M", "bq"),
+ (0x33C4, "M", "cc"),
+ (0x33C5, "M", "cd"),
+ (0x33C6, "M", "c∕kg"),
+ (0x33C7, "X"),
+ (0x33C8, "M", "db"),
+ (0x33C9, "M", "gy"),
+ (0x33CA, "M", "ha"),
+ (0x33CB, "M", "hp"),
+ (0x33CC, "M", "in"),
+ (0x33CD, "M", "kk"),
+ (0x33CE, "M", "km"),
+ (0x33CF, "M", "kt"),
+ (0x33D0, "M", "lm"),
+ (0x33D1, "M", "ln"),
+ (0x33D2, "M", "log"),
+ (0x33D3, "M", "lx"),
+ (0x33D4, "M", "mb"),
+ (0x33D5, "M", "mil"),
+ (0x33D6, "M", "mol"),
+ (0x33D7, "M", "ph"),
+ (0x33D8, "X"),
+ (0x33D9, "M", "ppm"),
+ (0x33DA, "M", "pr"),
+ (0x33DB, "M", "sr"),
+ (0x33DC, "M", "sv"),
+ (0x33DD, "M", "wb"),
+ (0x33DE, "M", "v∕m"),
+ (0x33DF, "M", "a∕m"),
+ (0x33E0, "M", "1日"),
+ (0x33E1, "M", "2日"),
+ (0x33E2, "M", "3日"),
+ (0x33E3, "M", "4日"),
+ (0x33E4, "M", "5日"),
+ (0x33E5, "M", "6日"),
+ (0x33E6, "M", "7日"),
+ (0x33E7, "M", "8日"),
+ (0x33E8, "M", "9日"),
+ (0x33E9, "M", "10日"),
+ (0x33EA, "M", "11日"),
+ (0x33EB, "M", "12日"),
+ (0x33EC, "M", "13日"),
+ (0x33ED, "M", "14日"),
+ (0x33EE, "M", "15日"),
+ (0x33EF, "M", "16日"),
+ (0x33F0, "M", "17日"),
+ (0x33F1, "M", "18日"),
+ (0x33F2, "M", "19日"),
+ (0x33F3, "M", "20日"),
+ (0x33F4, "M", "21日"),
+ (0x33F5, "M", "22日"),
+ (0x33F6, "M", "23日"),
+ (0x33F7, "M", "24日"),
+ (0x33F8, "M", "25日"),
+ (0x33F9, "M", "26日"),
+ (0x33FA, "M", "27日"),
+ (0x33FB, "M", "28日"),
+ (0x33FC, "M", "29日"),
+ (0x33FD, "M", "30日"),
+ (0x33FE, "M", "31日"),
+ (0x33FF, "M", "gal"),
+ (0x3400, "V"),
+ (0xA48D, "X"),
+ (0xA490, "V"),
+ (0xA4C7, "X"),
+ (0xA4D0, "V"),
+ (0xA62C, "X"),
+ (0xA640, "M", "ꙁ"),
+ (0xA641, "V"),
+ (0xA642, "M", "ꙃ"),
+ (0xA643, "V"),
+ (0xA644, "M", "ꙅ"),
+ (0xA645, "V"),
+ (0xA646, "M", "ꙇ"),
+ (0xA647, "V"),
+ (0xA648, "M", "ꙉ"),
+ (0xA649, "V"),
+ (0xA64A, "M", "ꙋ"),
+ (0xA64B, "V"),
+ (0xA64C, "M", "ꙍ"),
+ (0xA64D, "V"),
+ (0xA64E, "M", "ꙏ"),
+ (0xA64F, "V"),
+ (0xA650, "M", "ꙑ"),
+ (0xA651, "V"),
+ (0xA652, "M", "ꙓ"),
+ (0xA653, "V"),
+ (0xA654, "M", "ꙕ"),
+ (0xA655, "V"),
+ (0xA656, "M", "ꙗ"),
+ (0xA657, "V"),
+ (0xA658, "M", "ꙙ"),
+ (0xA659, "V"),
+ (0xA65A, "M", "ꙛ"),
+ (0xA65B, "V"),
+ (0xA65C, "M", "ꙝ"),
+ (0xA65D, "V"),
+ (0xA65E, "M", "ꙟ"),
+ ]
+
+
+def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA65F, "V"),
+ (0xA660, "M", "ꙡ"),
+ (0xA661, "V"),
+ (0xA662, "M", "ꙣ"),
+ (0xA663, "V"),
+ (0xA664, "M", "ꙥ"),
+ (0xA665, "V"),
+ (0xA666, "M", "ꙧ"),
+ (0xA667, "V"),
+ (0xA668, "M", "ꙩ"),
+ (0xA669, "V"),
+ (0xA66A, "M", "ꙫ"),
+ (0xA66B, "V"),
+ (0xA66C, "M", "ꙭ"),
+ (0xA66D, "V"),
+ (0xA680, "M", "ꚁ"),
+ (0xA681, "V"),
+ (0xA682, "M", "ꚃ"),
+ (0xA683, "V"),
+ (0xA684, "M", "ꚅ"),
+ (0xA685, "V"),
+ (0xA686, "M", "ꚇ"),
+ (0xA687, "V"),
+ (0xA688, "M", "ꚉ"),
+ (0xA689, "V"),
+ (0xA68A, "M", "ꚋ"),
+ (0xA68B, "V"),
+ (0xA68C, "M", "ꚍ"),
+ (0xA68D, "V"),
+ (0xA68E, "M", "ꚏ"),
+ (0xA68F, "V"),
+ (0xA690, "M", "ꚑ"),
+ (0xA691, "V"),
+ (0xA692, "M", "ꚓ"),
+ (0xA693, "V"),
+ (0xA694, "M", "ꚕ"),
+ (0xA695, "V"),
+ (0xA696, "M", "ꚗ"),
+ (0xA697, "V"),
+ (0xA698, "M", "ꚙ"),
+ (0xA699, "V"),
+ (0xA69A, "M", "ꚛ"),
+ (0xA69B, "V"),
+ (0xA69C, "M", "ъ"),
+ (0xA69D, "M", "ь"),
+ (0xA69E, "V"),
+ (0xA6F8, "X"),
+ (0xA700, "V"),
+ (0xA722, "M", "ꜣ"),
+ (0xA723, "V"),
+ (0xA724, "M", "ꜥ"),
+ (0xA725, "V"),
+ (0xA726, "M", "ꜧ"),
+ (0xA727, "V"),
+ (0xA728, "M", "ꜩ"),
+ (0xA729, "V"),
+ (0xA72A, "M", "ꜫ"),
+ (0xA72B, "V"),
+ (0xA72C, "M", "ꜭ"),
+ (0xA72D, "V"),
+ (0xA72E, "M", "ꜯ"),
+ (0xA72F, "V"),
+ (0xA732, "M", "ꜳ"),
+ (0xA733, "V"),
+ (0xA734, "M", "ꜵ"),
+ (0xA735, "V"),
+ (0xA736, "M", "ꜷ"),
+ (0xA737, "V"),
+ (0xA738, "M", "ꜹ"),
+ (0xA739, "V"),
+ (0xA73A, "M", "ꜻ"),
+ (0xA73B, "V"),
+ (0xA73C, "M", "ꜽ"),
+ (0xA73D, "V"),
+ (0xA73E, "M", "ꜿ"),
+ (0xA73F, "V"),
+ (0xA740, "M", "ꝁ"),
+ (0xA741, "V"),
+ (0xA742, "M", "ꝃ"),
+ (0xA743, "V"),
+ (0xA744, "M", "ꝅ"),
+ (0xA745, "V"),
+ (0xA746, "M", "ꝇ"),
+ (0xA747, "V"),
+ (0xA748, "M", "ꝉ"),
+ (0xA749, "V"),
+ (0xA74A, "M", "ꝋ"),
+ (0xA74B, "V"),
+ (0xA74C, "M", "ꝍ"),
+ (0xA74D, "V"),
+ (0xA74E, "M", "ꝏ"),
+ (0xA74F, "V"),
+ (0xA750, "M", "ꝑ"),
+ (0xA751, "V"),
+ (0xA752, "M", "ꝓ"),
+ (0xA753, "V"),
+ (0xA754, "M", "ꝕ"),
+ (0xA755, "V"),
+ (0xA756, "M", "ꝗ"),
+ (0xA757, "V"),
+ ]
+
+
+def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA758, "M", "ꝙ"),
+ (0xA759, "V"),
+ (0xA75A, "M", "ꝛ"),
+ (0xA75B, "V"),
+ (0xA75C, "M", "ꝝ"),
+ (0xA75D, "V"),
+ (0xA75E, "M", "ꝟ"),
+ (0xA75F, "V"),
+ (0xA760, "M", "ꝡ"),
+ (0xA761, "V"),
+ (0xA762, "M", "ꝣ"),
+ (0xA763, "V"),
+ (0xA764, "M", "ꝥ"),
+ (0xA765, "V"),
+ (0xA766, "M", "ꝧ"),
+ (0xA767, "V"),
+ (0xA768, "M", "ꝩ"),
+ (0xA769, "V"),
+ (0xA76A, "M", "ꝫ"),
+ (0xA76B, "V"),
+ (0xA76C, "M", "ꝭ"),
+ (0xA76D, "V"),
+ (0xA76E, "M", "ꝯ"),
+ (0xA76F, "V"),
+ (0xA770, "M", "ꝯ"),
+ (0xA771, "V"),
+ (0xA779, "M", "ꝺ"),
+ (0xA77A, "V"),
+ (0xA77B, "M", "ꝼ"),
+ (0xA77C, "V"),
+ (0xA77D, "M", "ᵹ"),
+ (0xA77E, "M", "ꝿ"),
+ (0xA77F, "V"),
+ (0xA780, "M", "ꞁ"),
+ (0xA781, "V"),
+ (0xA782, "M", "ꞃ"),
+ (0xA783, "V"),
+ (0xA784, "M", "ꞅ"),
+ (0xA785, "V"),
+ (0xA786, "M", "ꞇ"),
+ (0xA787, "V"),
+ (0xA78B, "M", "ꞌ"),
+ (0xA78C, "V"),
+ (0xA78D, "M", "ɥ"),
+ (0xA78E, "V"),
+ (0xA790, "M", "ꞑ"),
+ (0xA791, "V"),
+ (0xA792, "M", "ꞓ"),
+ (0xA793, "V"),
+ (0xA796, "M", "ꞗ"),
+ (0xA797, "V"),
+ (0xA798, "M", "ꞙ"),
+ (0xA799, "V"),
+ (0xA79A, "M", "ꞛ"),
+ (0xA79B, "V"),
+ (0xA79C, "M", "ꞝ"),
+ (0xA79D, "V"),
+ (0xA79E, "M", "ꞟ"),
+ (0xA79F, "V"),
+ (0xA7A0, "M", "ꞡ"),
+ (0xA7A1, "V"),
+ (0xA7A2, "M", "ꞣ"),
+ (0xA7A3, "V"),
+ (0xA7A4, "M", "ꞥ"),
+ (0xA7A5, "V"),
+ (0xA7A6, "M", "ꞧ"),
+ (0xA7A7, "V"),
+ (0xA7A8, "M", "ꞩ"),
+ (0xA7A9, "V"),
+ (0xA7AA, "M", "ɦ"),
+ (0xA7AB, "M", "ɜ"),
+ (0xA7AC, "M", "ɡ"),
+ (0xA7AD, "M", "ɬ"),
+ (0xA7AE, "M", "ɪ"),
+ (0xA7AF, "V"),
+ (0xA7B0, "M", "ʞ"),
+ (0xA7B1, "M", "ʇ"),
+ (0xA7B2, "M", "ʝ"),
+ (0xA7B3, "M", "ꭓ"),
+ (0xA7B4, "M", "ꞵ"),
+ (0xA7B5, "V"),
+ (0xA7B6, "M", "ꞷ"),
+ (0xA7B7, "V"),
+ (0xA7B8, "M", "ꞹ"),
+ (0xA7B9, "V"),
+ (0xA7BA, "M", "ꞻ"),
+ (0xA7BB, "V"),
+ (0xA7BC, "M", "ꞽ"),
+ (0xA7BD, "V"),
+ (0xA7BE, "M", "ꞿ"),
+ (0xA7BF, "V"),
+ (0xA7C0, "M", "ꟁ"),
+ (0xA7C1, "V"),
+ (0xA7C2, "M", "ꟃ"),
+ (0xA7C3, "V"),
+ (0xA7C4, "M", "ꞔ"),
+ (0xA7C5, "M", "ʂ"),
+ (0xA7C6, "M", "ᶎ"),
+ (0xA7C7, "M", "ꟈ"),
+ (0xA7C8, "V"),
+ ]
+
+
+def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xA7C9, "M", "ꟊ"),
+ (0xA7CA, "V"),
+ (0xA7CB, "M", "ɤ"),
+ (0xA7CC, "M", ""),
+ (0xA7CD, "V"),
+ (0xA7CE, "X"),
+ (0xA7D0, "M", "ꟑ"),
+ (0xA7D1, "V"),
+ (0xA7D2, "X"),
+ (0xA7D3, "V"),
+ (0xA7D4, "X"),
+ (0xA7D5, "V"),
+ (0xA7D6, "M", "ꟗ"),
+ (0xA7D7, "V"),
+ (0xA7D8, "M", "ꟙ"),
+ (0xA7D9, "V"),
+ (0xA7DA, "M", ""),
+ (0xA7DB, "V"),
+ (0xA7DC, "M", "ƛ"),
+ (0xA7DD, "X"),
+ (0xA7F2, "M", "c"),
+ (0xA7F3, "M", "f"),
+ (0xA7F4, "M", "q"),
+ (0xA7F5, "M", "ꟶ"),
+ (0xA7F6, "V"),
+ (0xA7F8, "M", "ħ"),
+ (0xA7F9, "M", "œ"),
+ (0xA7FA, "V"),
+ (0xA82D, "X"),
+ (0xA830, "V"),
+ (0xA83A, "X"),
+ (0xA840, "V"),
+ (0xA878, "X"),
+ (0xA880, "V"),
+ (0xA8C6, "X"),
+ (0xA8CE, "V"),
+ (0xA8DA, "X"),
+ (0xA8E0, "V"),
+ (0xA954, "X"),
+ (0xA95F, "V"),
+ (0xA97D, "X"),
+ (0xA980, "V"),
+ (0xA9CE, "X"),
+ (0xA9CF, "V"),
+ (0xA9DA, "X"),
+ (0xA9DE, "V"),
+ (0xA9FF, "X"),
+ (0xAA00, "V"),
+ (0xAA37, "X"),
+ (0xAA40, "V"),
+ (0xAA4E, "X"),
+ (0xAA50, "V"),
+ (0xAA5A, "X"),
+ (0xAA5C, "V"),
+ (0xAAC3, "X"),
+ (0xAADB, "V"),
+ (0xAAF7, "X"),
+ (0xAB01, "V"),
+ (0xAB07, "X"),
+ (0xAB09, "V"),
+ (0xAB0F, "X"),
+ (0xAB11, "V"),
+ (0xAB17, "X"),
+ (0xAB20, "V"),
+ (0xAB27, "X"),
+ (0xAB28, "V"),
+ (0xAB2F, "X"),
+ (0xAB30, "V"),
+ (0xAB5C, "M", "ꜧ"),
+ (0xAB5D, "M", "ꬷ"),
+ (0xAB5E, "M", "ɫ"),
+ (0xAB5F, "M", "ꭒ"),
+ (0xAB60, "V"),
+ (0xAB69, "M", "ʍ"),
+ (0xAB6A, "V"),
+ (0xAB6C, "X"),
+ (0xAB70, "M", "Ꭰ"),
+ (0xAB71, "M", "Ꭱ"),
+ (0xAB72, "M", "Ꭲ"),
+ (0xAB73, "M", "Ꭳ"),
+ (0xAB74, "M", "Ꭴ"),
+ (0xAB75, "M", "Ꭵ"),
+ (0xAB76, "M", "Ꭶ"),
+ (0xAB77, "M", "Ꭷ"),
+ (0xAB78, "M", "Ꭸ"),
+ (0xAB79, "M", "Ꭹ"),
+ (0xAB7A, "M", "Ꭺ"),
+ (0xAB7B, "M", "Ꭻ"),
+ (0xAB7C, "M", "Ꭼ"),
+ (0xAB7D, "M", "Ꭽ"),
+ (0xAB7E, "M", "Ꭾ"),
+ (0xAB7F, "M", "Ꭿ"),
+ (0xAB80, "M", "Ꮀ"),
+ (0xAB81, "M", "Ꮁ"),
+ (0xAB82, "M", "Ꮂ"),
+ (0xAB83, "M", "Ꮃ"),
+ (0xAB84, "M", "Ꮄ"),
+ (0xAB85, "M", "Ꮅ"),
+ (0xAB86, "M", "Ꮆ"),
+ (0xAB87, "M", "Ꮇ"),
+ ]
+
+
+def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xAB88, "M", "Ꮈ"),
+ (0xAB89, "M", "Ꮉ"),
+ (0xAB8A, "M", "Ꮊ"),
+ (0xAB8B, "M", "Ꮋ"),
+ (0xAB8C, "M", "Ꮌ"),
+ (0xAB8D, "M", "Ꮍ"),
+ (0xAB8E, "M", "Ꮎ"),
+ (0xAB8F, "M", "Ꮏ"),
+ (0xAB90, "M", "Ꮐ"),
+ (0xAB91, "M", "Ꮑ"),
+ (0xAB92, "M", "Ꮒ"),
+ (0xAB93, "M", "Ꮓ"),
+ (0xAB94, "M", "Ꮔ"),
+ (0xAB95, "M", "Ꮕ"),
+ (0xAB96, "M", "Ꮖ"),
+ (0xAB97, "M", "Ꮗ"),
+ (0xAB98, "M", "Ꮘ"),
+ (0xAB99, "M", "Ꮙ"),
+ (0xAB9A, "M", "Ꮚ"),
+ (0xAB9B, "M", "Ꮛ"),
+ (0xAB9C, "M", "Ꮜ"),
+ (0xAB9D, "M", "Ꮝ"),
+ (0xAB9E, "M", "Ꮞ"),
+ (0xAB9F, "M", "Ꮟ"),
+ (0xABA0, "M", "Ꮠ"),
+ (0xABA1, "M", "Ꮡ"),
+ (0xABA2, "M", "Ꮢ"),
+ (0xABA3, "M", "Ꮣ"),
+ (0xABA4, "M", "Ꮤ"),
+ (0xABA5, "M", "Ꮥ"),
+ (0xABA6, "M", "Ꮦ"),
+ (0xABA7, "M", "Ꮧ"),
+ (0xABA8, "M", "Ꮨ"),
+ (0xABA9, "M", "Ꮩ"),
+ (0xABAA, "M", "Ꮪ"),
+ (0xABAB, "M", "Ꮫ"),
+ (0xABAC, "M", "Ꮬ"),
+ (0xABAD, "M", "Ꮭ"),
+ (0xABAE, "M", "Ꮮ"),
+ (0xABAF, "M", "Ꮯ"),
+ (0xABB0, "M", "Ꮰ"),
+ (0xABB1, "M", "Ꮱ"),
+ (0xABB2, "M", "Ꮲ"),
+ (0xABB3, "M", "Ꮳ"),
+ (0xABB4, "M", "Ꮴ"),
+ (0xABB5, "M", "Ꮵ"),
+ (0xABB6, "M", "Ꮶ"),
+ (0xABB7, "M", "Ꮷ"),
+ (0xABB8, "M", "Ꮸ"),
+ (0xABB9, "M", "Ꮹ"),
+ (0xABBA, "M", "Ꮺ"),
+ (0xABBB, "M", "Ꮻ"),
+ (0xABBC, "M", "Ꮼ"),
+ (0xABBD, "M", "Ꮽ"),
+ (0xABBE, "M", "Ꮾ"),
+ (0xABBF, "M", "Ꮿ"),
+ (0xABC0, "V"),
+ (0xABEE, "X"),
+ (0xABF0, "V"),
+ (0xABFA, "X"),
+ (0xAC00, "V"),
+ (0xD7A4, "X"),
+ (0xD7B0, "V"),
+ (0xD7C7, "X"),
+ (0xD7CB, "V"),
+ (0xD7FC, "X"),
+ (0xF900, "M", "豈"),
+ (0xF901, "M", "更"),
+ (0xF902, "M", "車"),
+ (0xF903, "M", "賈"),
+ (0xF904, "M", "滑"),
+ (0xF905, "M", "串"),
+ (0xF906, "M", "句"),
+ (0xF907, "M", "龜"),
+ (0xF909, "M", "契"),
+ (0xF90A, "M", "金"),
+ (0xF90B, "M", "喇"),
+ (0xF90C, "M", "奈"),
+ (0xF90D, "M", "懶"),
+ (0xF90E, "M", "癩"),
+ (0xF90F, "M", "羅"),
+ (0xF910, "M", "蘿"),
+ (0xF911, "M", "螺"),
+ (0xF912, "M", "裸"),
+ (0xF913, "M", "邏"),
+ (0xF914, "M", "樂"),
+ (0xF915, "M", "洛"),
+ (0xF916, "M", "烙"),
+ (0xF917, "M", "珞"),
+ (0xF918, "M", "落"),
+ (0xF919, "M", "酪"),
+ (0xF91A, "M", "駱"),
+ (0xF91B, "M", "亂"),
+ (0xF91C, "M", "卵"),
+ (0xF91D, "M", "欄"),
+ (0xF91E, "M", "爛"),
+ (0xF91F, "M", "蘭"),
+ (0xF920, "M", "鸞"),
+ (0xF921, "M", "嵐"),
+ (0xF922, "M", "濫"),
+ ]
+
+
+def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xF923, "M", "藍"),
+ (0xF924, "M", "襤"),
+ (0xF925, "M", "拉"),
+ (0xF926, "M", "臘"),
+ (0xF927, "M", "蠟"),
+ (0xF928, "M", "廊"),
+ (0xF929, "M", "朗"),
+ (0xF92A, "M", "浪"),
+ (0xF92B, "M", "狼"),
+ (0xF92C, "M", "郎"),
+ (0xF92D, "M", "來"),
+ (0xF92E, "M", "冷"),
+ (0xF92F, "M", "勞"),
+ (0xF930, "M", "擄"),
+ (0xF931, "M", "櫓"),
+ (0xF932, "M", "爐"),
+ (0xF933, "M", "盧"),
+ (0xF934, "M", "老"),
+ (0xF935, "M", "蘆"),
+ (0xF936, "M", "虜"),
+ (0xF937, "M", "路"),
+ (0xF938, "M", "露"),
+ (0xF939, "M", "魯"),
+ (0xF93A, "M", "鷺"),
+ (0xF93B, "M", "碌"),
+ (0xF93C, "M", "祿"),
+ (0xF93D, "M", "綠"),
+ (0xF93E, "M", "菉"),
+ (0xF93F, "M", "錄"),
+ (0xF940, "M", "鹿"),
+ (0xF941, "M", "論"),
+ (0xF942, "M", "壟"),
+ (0xF943, "M", "弄"),
+ (0xF944, "M", "籠"),
+ (0xF945, "M", "聾"),
+ (0xF946, "M", "牢"),
+ (0xF947, "M", "磊"),
+ (0xF948, "M", "賂"),
+ (0xF949, "M", "雷"),
+ (0xF94A, "M", "壘"),
+ (0xF94B, "M", "屢"),
+ (0xF94C, "M", "樓"),
+ (0xF94D, "M", "淚"),
+ (0xF94E, "M", "漏"),
+ (0xF94F, "M", "累"),
+ (0xF950, "M", "縷"),
+ (0xF951, "M", "陋"),
+ (0xF952, "M", "勒"),
+ (0xF953, "M", "肋"),
+ (0xF954, "M", "凜"),
+ (0xF955, "M", "凌"),
+ (0xF956, "M", "稜"),
+ (0xF957, "M", "綾"),
+ (0xF958, "M", "菱"),
+ (0xF959, "M", "陵"),
+ (0xF95A, "M", "讀"),
+ (0xF95B, "M", "拏"),
+ (0xF95C, "M", "樂"),
+ (0xF95D, "M", "諾"),
+ (0xF95E, "M", "丹"),
+ (0xF95F, "M", "寧"),
+ (0xF960, "M", "怒"),
+ (0xF961, "M", "率"),
+ (0xF962, "M", "異"),
+ (0xF963, "M", "北"),
+ (0xF964, "M", "磻"),
+ (0xF965, "M", "便"),
+ (0xF966, "M", "復"),
+ (0xF967, "M", "不"),
+ (0xF968, "M", "泌"),
+ (0xF969, "M", "數"),
+ (0xF96A, "M", "索"),
+ (0xF96B, "M", "參"),
+ (0xF96C, "M", "塞"),
+ (0xF96D, "M", "省"),
+ (0xF96E, "M", "葉"),
+ (0xF96F, "M", "說"),
+ (0xF970, "M", "殺"),
+ (0xF971, "M", "辰"),
+ (0xF972, "M", "沈"),
+ (0xF973, "M", "拾"),
+ (0xF974, "M", "若"),
+ (0xF975, "M", "掠"),
+ (0xF976, "M", "略"),
+ (0xF977, "M", "亮"),
+ (0xF978, "M", "兩"),
+ (0xF979, "M", "凉"),
+ (0xF97A, "M", "梁"),
+ (0xF97B, "M", "糧"),
+ (0xF97C, "M", "良"),
+ (0xF97D, "M", "諒"),
+ (0xF97E, "M", "量"),
+ (0xF97F, "M", "勵"),
+ (0xF980, "M", "呂"),
+ (0xF981, "M", "女"),
+ (0xF982, "M", "廬"),
+ (0xF983, "M", "旅"),
+ (0xF984, "M", "濾"),
+ (0xF985, "M", "礪"),
+ (0xF986, "M", "閭"),
+ ]
+
+
+def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xF987, "M", "驪"),
+ (0xF988, "M", "麗"),
+ (0xF989, "M", "黎"),
+ (0xF98A, "M", "力"),
+ (0xF98B, "M", "曆"),
+ (0xF98C, "M", "歷"),
+ (0xF98D, "M", "轢"),
+ (0xF98E, "M", "年"),
+ (0xF98F, "M", "憐"),
+ (0xF990, "M", "戀"),
+ (0xF991, "M", "撚"),
+ (0xF992, "M", "漣"),
+ (0xF993, "M", "煉"),
+ (0xF994, "M", "璉"),
+ (0xF995, "M", "秊"),
+ (0xF996, "M", "練"),
+ (0xF997, "M", "聯"),
+ (0xF998, "M", "輦"),
+ (0xF999, "M", "蓮"),
+ (0xF99A, "M", "連"),
+ (0xF99B, "M", "鍊"),
+ (0xF99C, "M", "列"),
+ (0xF99D, "M", "劣"),
+ (0xF99E, "M", "咽"),
+ (0xF99F, "M", "烈"),
+ (0xF9A0, "M", "裂"),
+ (0xF9A1, "M", "說"),
+ (0xF9A2, "M", "廉"),
+ (0xF9A3, "M", "念"),
+ (0xF9A4, "M", "捻"),
+ (0xF9A5, "M", "殮"),
+ (0xF9A6, "M", "簾"),
+ (0xF9A7, "M", "獵"),
+ (0xF9A8, "M", "令"),
+ (0xF9A9, "M", "囹"),
+ (0xF9AA, "M", "寧"),
+ (0xF9AB, "M", "嶺"),
+ (0xF9AC, "M", "怜"),
+ (0xF9AD, "M", "玲"),
+ (0xF9AE, "M", "瑩"),
+ (0xF9AF, "M", "羚"),
+ (0xF9B0, "M", "聆"),
+ (0xF9B1, "M", "鈴"),
+ (0xF9B2, "M", "零"),
+ (0xF9B3, "M", "靈"),
+ (0xF9B4, "M", "領"),
+ (0xF9B5, "M", "例"),
+ (0xF9B6, "M", "禮"),
+ (0xF9B7, "M", "醴"),
+ (0xF9B8, "M", "隸"),
+ (0xF9B9, "M", "惡"),
+ (0xF9BA, "M", "了"),
+ (0xF9BB, "M", "僚"),
+ (0xF9BC, "M", "寮"),
+ (0xF9BD, "M", "尿"),
+ (0xF9BE, "M", "料"),
+ (0xF9BF, "M", "樂"),
+ (0xF9C0, "M", "燎"),
+ (0xF9C1, "M", "療"),
+ (0xF9C2, "M", "蓼"),
+ (0xF9C3, "M", "遼"),
+ (0xF9C4, "M", "龍"),
+ (0xF9C5, "M", "暈"),
+ (0xF9C6, "M", "阮"),
+ (0xF9C7, "M", "劉"),
+ (0xF9C8, "M", "杻"),
+ (0xF9C9, "M", "柳"),
+ (0xF9CA, "M", "流"),
+ (0xF9CB, "M", "溜"),
+ (0xF9CC, "M", "琉"),
+ (0xF9CD, "M", "留"),
+ (0xF9CE, "M", "硫"),
+ (0xF9CF, "M", "紐"),
+ (0xF9D0, "M", "類"),
+ (0xF9D1, "M", "六"),
+ (0xF9D2, "M", "戮"),
+ (0xF9D3, "M", "陸"),
+ (0xF9D4, "M", "倫"),
+ (0xF9D5, "M", "崙"),
+ (0xF9D6, "M", "淪"),
+ (0xF9D7, "M", "輪"),
+ (0xF9D8, "M", "律"),
+ (0xF9D9, "M", "慄"),
+ (0xF9DA, "M", "栗"),
+ (0xF9DB, "M", "率"),
+ (0xF9DC, "M", "隆"),
+ (0xF9DD, "M", "利"),
+ (0xF9DE, "M", "吏"),
+ (0xF9DF, "M", "履"),
+ (0xF9E0, "M", "易"),
+ (0xF9E1, "M", "李"),
+ (0xF9E2, "M", "梨"),
+ (0xF9E3, "M", "泥"),
+ (0xF9E4, "M", "理"),
+ (0xF9E5, "M", "痢"),
+ (0xF9E6, "M", "罹"),
+ (0xF9E7, "M", "裏"),
+ (0xF9E8, "M", "裡"),
+ (0xF9E9, "M", "里"),
+ (0xF9EA, "M", "離"),
+ ]
+
+
+def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xF9EB, "M", "匿"),
+ (0xF9EC, "M", "溺"),
+ (0xF9ED, "M", "吝"),
+ (0xF9EE, "M", "燐"),
+ (0xF9EF, "M", "璘"),
+ (0xF9F0, "M", "藺"),
+ (0xF9F1, "M", "隣"),
+ (0xF9F2, "M", "鱗"),
+ (0xF9F3, "M", "麟"),
+ (0xF9F4, "M", "林"),
+ (0xF9F5, "M", "淋"),
+ (0xF9F6, "M", "臨"),
+ (0xF9F7, "M", "立"),
+ (0xF9F8, "M", "笠"),
+ (0xF9F9, "M", "粒"),
+ (0xF9FA, "M", "狀"),
+ (0xF9FB, "M", "炙"),
+ (0xF9FC, "M", "識"),
+ (0xF9FD, "M", "什"),
+ (0xF9FE, "M", "茶"),
+ (0xF9FF, "M", "刺"),
+ (0xFA00, "M", "切"),
+ (0xFA01, "M", "度"),
+ (0xFA02, "M", "拓"),
+ (0xFA03, "M", "糖"),
+ (0xFA04, "M", "宅"),
+ (0xFA05, "M", "洞"),
+ (0xFA06, "M", "暴"),
+ (0xFA07, "M", "輻"),
+ (0xFA08, "M", "行"),
+ (0xFA09, "M", "降"),
+ (0xFA0A, "M", "見"),
+ (0xFA0B, "M", "廓"),
+ (0xFA0C, "M", "兀"),
+ (0xFA0D, "M", "嗀"),
+ (0xFA0E, "V"),
+ (0xFA10, "M", "塚"),
+ (0xFA11, "V"),
+ (0xFA12, "M", "晴"),
+ (0xFA13, "V"),
+ (0xFA15, "M", "凞"),
+ (0xFA16, "M", "猪"),
+ (0xFA17, "M", "益"),
+ (0xFA18, "M", "礼"),
+ (0xFA19, "M", "神"),
+ (0xFA1A, "M", "祥"),
+ (0xFA1B, "M", "福"),
+ (0xFA1C, "M", "靖"),
+ (0xFA1D, "M", "精"),
+ (0xFA1E, "M", "羽"),
+ (0xFA1F, "V"),
+ (0xFA20, "M", "蘒"),
+ (0xFA21, "V"),
+ (0xFA22, "M", "諸"),
+ (0xFA23, "V"),
+ (0xFA25, "M", "逸"),
+ (0xFA26, "M", "都"),
+ (0xFA27, "V"),
+ (0xFA2A, "M", "飯"),
+ (0xFA2B, "M", "飼"),
+ (0xFA2C, "M", "館"),
+ (0xFA2D, "M", "鶴"),
+ (0xFA2E, "M", "郞"),
+ (0xFA2F, "M", "隷"),
+ (0xFA30, "M", "侮"),
+ (0xFA31, "M", "僧"),
+ (0xFA32, "M", "免"),
+ (0xFA33, "M", "勉"),
+ (0xFA34, "M", "勤"),
+ (0xFA35, "M", "卑"),
+ (0xFA36, "M", "喝"),
+ (0xFA37, "M", "嘆"),
+ (0xFA38, "M", "器"),
+ (0xFA39, "M", "塀"),
+ (0xFA3A, "M", "墨"),
+ (0xFA3B, "M", "層"),
+ (0xFA3C, "M", "屮"),
+ (0xFA3D, "M", "悔"),
+ (0xFA3E, "M", "慨"),
+ (0xFA3F, "M", "憎"),
+ (0xFA40, "M", "懲"),
+ (0xFA41, "M", "敏"),
+ (0xFA42, "M", "既"),
+ (0xFA43, "M", "暑"),
+ (0xFA44, "M", "梅"),
+ (0xFA45, "M", "海"),
+ (0xFA46, "M", "渚"),
+ (0xFA47, "M", "漢"),
+ (0xFA48, "M", "煮"),
+ (0xFA49, "M", "爫"),
+ (0xFA4A, "M", "琢"),
+ (0xFA4B, "M", "碑"),
+ (0xFA4C, "M", "社"),
+ (0xFA4D, "M", "祉"),
+ (0xFA4E, "M", "祈"),
+ (0xFA4F, "M", "祐"),
+ (0xFA50, "M", "祖"),
+ (0xFA51, "M", "祝"),
+ (0xFA52, "M", "禍"),
+ (0xFA53, "M", "禎"),
+ ]
+
+
+def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFA54, "M", "穀"),
+ (0xFA55, "M", "突"),
+ (0xFA56, "M", "節"),
+ (0xFA57, "M", "練"),
+ (0xFA58, "M", "縉"),
+ (0xFA59, "M", "繁"),
+ (0xFA5A, "M", "署"),
+ (0xFA5B, "M", "者"),
+ (0xFA5C, "M", "臭"),
+ (0xFA5D, "M", "艹"),
+ (0xFA5F, "M", "著"),
+ (0xFA60, "M", "褐"),
+ (0xFA61, "M", "視"),
+ (0xFA62, "M", "謁"),
+ (0xFA63, "M", "謹"),
+ (0xFA64, "M", "賓"),
+ (0xFA65, "M", "贈"),
+ (0xFA66, "M", "辶"),
+ (0xFA67, "M", "逸"),
+ (0xFA68, "M", "難"),
+ (0xFA69, "M", "響"),
+ (0xFA6A, "M", "頻"),
+ (0xFA6B, "M", "恵"),
+ (0xFA6C, "M", "𤋮"),
+ (0xFA6D, "M", "舘"),
+ (0xFA6E, "X"),
+ (0xFA70, "M", "並"),
+ (0xFA71, "M", "况"),
+ (0xFA72, "M", "全"),
+ (0xFA73, "M", "侀"),
+ (0xFA74, "M", "充"),
+ (0xFA75, "M", "冀"),
+ (0xFA76, "M", "勇"),
+ (0xFA77, "M", "勺"),
+ (0xFA78, "M", "喝"),
+ (0xFA79, "M", "啕"),
+ (0xFA7A, "M", "喙"),
+ (0xFA7B, "M", "嗢"),
+ (0xFA7C, "M", "塚"),
+ (0xFA7D, "M", "墳"),
+ (0xFA7E, "M", "奄"),
+ (0xFA7F, "M", "奔"),
+ (0xFA80, "M", "婢"),
+ (0xFA81, "M", "嬨"),
+ (0xFA82, "M", "廒"),
+ (0xFA83, "M", "廙"),
+ (0xFA84, "M", "彩"),
+ (0xFA85, "M", "徭"),
+ (0xFA86, "M", "惘"),
+ (0xFA87, "M", "慎"),
+ (0xFA88, "M", "愈"),
+ (0xFA89, "M", "憎"),
+ (0xFA8A, "M", "慠"),
+ (0xFA8B, "M", "懲"),
+ (0xFA8C, "M", "戴"),
+ (0xFA8D, "M", "揄"),
+ (0xFA8E, "M", "搜"),
+ (0xFA8F, "M", "摒"),
+ (0xFA90, "M", "敖"),
+ (0xFA91, "M", "晴"),
+ (0xFA92, "M", "朗"),
+ (0xFA93, "M", "望"),
+ (0xFA94, "M", "杖"),
+ (0xFA95, "M", "歹"),
+ (0xFA96, "M", "殺"),
+ (0xFA97, "M", "流"),
+ (0xFA98, "M", "滛"),
+ (0xFA99, "M", "滋"),
+ (0xFA9A, "M", "漢"),
+ (0xFA9B, "M", "瀞"),
+ (0xFA9C, "M", "煮"),
+ (0xFA9D, "M", "瞧"),
+ (0xFA9E, "M", "爵"),
+ (0xFA9F, "M", "犯"),
+ (0xFAA0, "M", "猪"),
+ (0xFAA1, "M", "瑱"),
+ (0xFAA2, "M", "甆"),
+ (0xFAA3, "M", "画"),
+ (0xFAA4, "M", "瘝"),
+ (0xFAA5, "M", "瘟"),
+ (0xFAA6, "M", "益"),
+ (0xFAA7, "M", "盛"),
+ (0xFAA8, "M", "直"),
+ (0xFAA9, "M", "睊"),
+ (0xFAAA, "M", "着"),
+ (0xFAAB, "M", "磌"),
+ (0xFAAC, "M", "窱"),
+ (0xFAAD, "M", "節"),
+ (0xFAAE, "M", "类"),
+ (0xFAAF, "M", "絛"),
+ (0xFAB0, "M", "練"),
+ (0xFAB1, "M", "缾"),
+ (0xFAB2, "M", "者"),
+ (0xFAB3, "M", "荒"),
+ (0xFAB4, "M", "華"),
+ (0xFAB5, "M", "蝹"),
+ (0xFAB6, "M", "襁"),
+ (0xFAB7, "M", "覆"),
+ (0xFAB8, "M", "視"),
+ (0xFAB9, "M", "調"),
+ ]
+
+
+def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFABA, "M", "諸"),
+ (0xFABB, "M", "請"),
+ (0xFABC, "M", "謁"),
+ (0xFABD, "M", "諾"),
+ (0xFABE, "M", "諭"),
+ (0xFABF, "M", "謹"),
+ (0xFAC0, "M", "變"),
+ (0xFAC1, "M", "贈"),
+ (0xFAC2, "M", "輸"),
+ (0xFAC3, "M", "遲"),
+ (0xFAC4, "M", "醙"),
+ (0xFAC5, "M", "鉶"),
+ (0xFAC6, "M", "陼"),
+ (0xFAC7, "M", "難"),
+ (0xFAC8, "M", "靖"),
+ (0xFAC9, "M", "韛"),
+ (0xFACA, "M", "響"),
+ (0xFACB, "M", "頋"),
+ (0xFACC, "M", "頻"),
+ (0xFACD, "M", "鬒"),
+ (0xFACE, "M", "龜"),
+ (0xFACF, "M", "𢡊"),
+ (0xFAD0, "M", "𢡄"),
+ (0xFAD1, "M", "𣏕"),
+ (0xFAD2, "M", "㮝"),
+ (0xFAD3, "M", "䀘"),
+ (0xFAD4, "M", "䀹"),
+ (0xFAD5, "M", "𥉉"),
+ (0xFAD6, "M", "𥳐"),
+ (0xFAD7, "M", "𧻓"),
+ (0xFAD8, "M", "齃"),
+ (0xFAD9, "M", "龎"),
+ (0xFADA, "X"),
+ (0xFB00, "M", "ff"),
+ (0xFB01, "M", "fi"),
+ (0xFB02, "M", "fl"),
+ (0xFB03, "M", "ffi"),
+ (0xFB04, "M", "ffl"),
+ (0xFB05, "M", "st"),
+ (0xFB07, "X"),
+ (0xFB13, "M", "մն"),
+ (0xFB14, "M", "մե"),
+ (0xFB15, "M", "մի"),
+ (0xFB16, "M", "վն"),
+ (0xFB17, "M", "մխ"),
+ (0xFB18, "X"),
+ (0xFB1D, "M", "יִ"),
+ (0xFB1E, "V"),
+ (0xFB1F, "M", "ײַ"),
+ (0xFB20, "M", "ע"),
+ (0xFB21, "M", "א"),
+ (0xFB22, "M", "ד"),
+ (0xFB23, "M", "ה"),
+ (0xFB24, "M", "כ"),
+ (0xFB25, "M", "ל"),
+ (0xFB26, "M", "ם"),
+ (0xFB27, "M", "ר"),
+ (0xFB28, "M", "ת"),
+ (0xFB29, "M", "+"),
+ (0xFB2A, "M", "שׁ"),
+ (0xFB2B, "M", "שׂ"),
+ (0xFB2C, "M", "שּׁ"),
+ (0xFB2D, "M", "שּׂ"),
+ (0xFB2E, "M", "אַ"),
+ (0xFB2F, "M", "אָ"),
+ (0xFB30, "M", "אּ"),
+ (0xFB31, "M", "בּ"),
+ (0xFB32, "M", "גּ"),
+ (0xFB33, "M", "דּ"),
+ (0xFB34, "M", "הּ"),
+ (0xFB35, "M", "וּ"),
+ (0xFB36, "M", "זּ"),
+ (0xFB37, "X"),
+ (0xFB38, "M", "טּ"),
+ (0xFB39, "M", "יּ"),
+ (0xFB3A, "M", "ךּ"),
+ (0xFB3B, "M", "כּ"),
+ (0xFB3C, "M", "לּ"),
+ (0xFB3D, "X"),
+ (0xFB3E, "M", "מּ"),
+ (0xFB3F, "X"),
+ (0xFB40, "M", "נּ"),
+ (0xFB41, "M", "סּ"),
+ (0xFB42, "X"),
+ (0xFB43, "M", "ףּ"),
+ (0xFB44, "M", "פּ"),
+ (0xFB45, "X"),
+ (0xFB46, "M", "צּ"),
+ (0xFB47, "M", "קּ"),
+ (0xFB48, "M", "רּ"),
+ (0xFB49, "M", "שּ"),
+ (0xFB4A, "M", "תּ"),
+ (0xFB4B, "M", "וֹ"),
+ (0xFB4C, "M", "בֿ"),
+ (0xFB4D, "M", "כֿ"),
+ (0xFB4E, "M", "פֿ"),
+ (0xFB4F, "M", "אל"),
+ (0xFB50, "M", "ٱ"),
+ (0xFB52, "M", "ٻ"),
+ (0xFB56, "M", "پ"),
+ ]
+
+
+def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFB5A, "M", "ڀ"),
+ (0xFB5E, "M", "ٺ"),
+ (0xFB62, "M", "ٿ"),
+ (0xFB66, "M", "ٹ"),
+ (0xFB6A, "M", "ڤ"),
+ (0xFB6E, "M", "ڦ"),
+ (0xFB72, "M", "ڄ"),
+ (0xFB76, "M", "ڃ"),
+ (0xFB7A, "M", "چ"),
+ (0xFB7E, "M", "ڇ"),
+ (0xFB82, "M", "ڍ"),
+ (0xFB84, "M", "ڌ"),
+ (0xFB86, "M", "ڎ"),
+ (0xFB88, "M", "ڈ"),
+ (0xFB8A, "M", "ژ"),
+ (0xFB8C, "M", "ڑ"),
+ (0xFB8E, "M", "ک"),
+ (0xFB92, "M", "گ"),
+ (0xFB96, "M", "ڳ"),
+ (0xFB9A, "M", "ڱ"),
+ (0xFB9E, "M", "ں"),
+ (0xFBA0, "M", "ڻ"),
+ (0xFBA4, "M", "ۀ"),
+ (0xFBA6, "M", "ہ"),
+ (0xFBAA, "M", "ھ"),
+ (0xFBAE, "M", "ے"),
+ (0xFBB0, "M", "ۓ"),
+ (0xFBB2, "V"),
+ (0xFBC3, "X"),
+ (0xFBD3, "M", "ڭ"),
+ (0xFBD7, "M", "ۇ"),
+ (0xFBD9, "M", "ۆ"),
+ (0xFBDB, "M", "ۈ"),
+ (0xFBDD, "M", "ۇٴ"),
+ (0xFBDE, "M", "ۋ"),
+ (0xFBE0, "M", "ۅ"),
+ (0xFBE2, "M", "ۉ"),
+ (0xFBE4, "M", "ې"),
+ (0xFBE8, "M", "ى"),
+ (0xFBEA, "M", "ئا"),
+ (0xFBEC, "M", "ئە"),
+ (0xFBEE, "M", "ئو"),
+ (0xFBF0, "M", "ئۇ"),
+ (0xFBF2, "M", "ئۆ"),
+ (0xFBF4, "M", "ئۈ"),
+ (0xFBF6, "M", "ئې"),
+ (0xFBF9, "M", "ئى"),
+ (0xFBFC, "M", "ی"),
+ (0xFC00, "M", "ئج"),
+ (0xFC01, "M", "ئح"),
+ (0xFC02, "M", "ئم"),
+ (0xFC03, "M", "ئى"),
+ (0xFC04, "M", "ئي"),
+ (0xFC05, "M", "بج"),
+ (0xFC06, "M", "بح"),
+ (0xFC07, "M", "بخ"),
+ (0xFC08, "M", "بم"),
+ (0xFC09, "M", "بى"),
+ (0xFC0A, "M", "بي"),
+ (0xFC0B, "M", "تج"),
+ (0xFC0C, "M", "تح"),
+ (0xFC0D, "M", "تخ"),
+ (0xFC0E, "M", "تم"),
+ (0xFC0F, "M", "تى"),
+ (0xFC10, "M", "تي"),
+ (0xFC11, "M", "ثج"),
+ (0xFC12, "M", "ثم"),
+ (0xFC13, "M", "ثى"),
+ (0xFC14, "M", "ثي"),
+ (0xFC15, "M", "جح"),
+ (0xFC16, "M", "جم"),
+ (0xFC17, "M", "حج"),
+ (0xFC18, "M", "حم"),
+ (0xFC19, "M", "خج"),
+ (0xFC1A, "M", "خح"),
+ (0xFC1B, "M", "خم"),
+ (0xFC1C, "M", "سج"),
+ (0xFC1D, "M", "سح"),
+ (0xFC1E, "M", "سخ"),
+ (0xFC1F, "M", "سم"),
+ (0xFC20, "M", "صح"),
+ (0xFC21, "M", "صم"),
+ (0xFC22, "M", "ضج"),
+ (0xFC23, "M", "ضح"),
+ (0xFC24, "M", "ضخ"),
+ (0xFC25, "M", "ضم"),
+ (0xFC26, "M", "طح"),
+ (0xFC27, "M", "طم"),
+ (0xFC28, "M", "ظم"),
+ (0xFC29, "M", "عج"),
+ (0xFC2A, "M", "عم"),
+ (0xFC2B, "M", "غج"),
+ (0xFC2C, "M", "غم"),
+ (0xFC2D, "M", "فج"),
+ (0xFC2E, "M", "فح"),
+ (0xFC2F, "M", "فخ"),
+ (0xFC30, "M", "فم"),
+ (0xFC31, "M", "فى"),
+ (0xFC32, "M", "في"),
+ (0xFC33, "M", "قح"),
+ ]
+
+
+def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFC34, "M", "قم"),
+ (0xFC35, "M", "قى"),
+ (0xFC36, "M", "قي"),
+ (0xFC37, "M", "كا"),
+ (0xFC38, "M", "كج"),
+ (0xFC39, "M", "كح"),
+ (0xFC3A, "M", "كخ"),
+ (0xFC3B, "M", "كل"),
+ (0xFC3C, "M", "كم"),
+ (0xFC3D, "M", "كى"),
+ (0xFC3E, "M", "كي"),
+ (0xFC3F, "M", "لج"),
+ (0xFC40, "M", "لح"),
+ (0xFC41, "M", "لخ"),
+ (0xFC42, "M", "لم"),
+ (0xFC43, "M", "لى"),
+ (0xFC44, "M", "لي"),
+ (0xFC45, "M", "مج"),
+ (0xFC46, "M", "مح"),
+ (0xFC47, "M", "مخ"),
+ (0xFC48, "M", "مم"),
+ (0xFC49, "M", "مى"),
+ (0xFC4A, "M", "مي"),
+ (0xFC4B, "M", "نج"),
+ (0xFC4C, "M", "نح"),
+ (0xFC4D, "M", "نخ"),
+ (0xFC4E, "M", "نم"),
+ (0xFC4F, "M", "نى"),
+ (0xFC50, "M", "ني"),
+ (0xFC51, "M", "هج"),
+ (0xFC52, "M", "هم"),
+ (0xFC53, "M", "هى"),
+ (0xFC54, "M", "هي"),
+ (0xFC55, "M", "يج"),
+ (0xFC56, "M", "يح"),
+ (0xFC57, "M", "يخ"),
+ (0xFC58, "M", "يم"),
+ (0xFC59, "M", "يى"),
+ (0xFC5A, "M", "يي"),
+ (0xFC5B, "M", "ذٰ"),
+ (0xFC5C, "M", "رٰ"),
+ (0xFC5D, "M", "ىٰ"),
+ (0xFC5E, "M", " ٌّ"),
+ (0xFC5F, "M", " ٍّ"),
+ (0xFC60, "M", " َّ"),
+ (0xFC61, "M", " ُّ"),
+ (0xFC62, "M", " ِّ"),
+ (0xFC63, "M", " ّٰ"),
+ (0xFC64, "M", "ئر"),
+ (0xFC65, "M", "ئز"),
+ (0xFC66, "M", "ئم"),
+ (0xFC67, "M", "ئن"),
+ (0xFC68, "M", "ئى"),
+ (0xFC69, "M", "ئي"),
+ (0xFC6A, "M", "بر"),
+ (0xFC6B, "M", "بز"),
+ (0xFC6C, "M", "بم"),
+ (0xFC6D, "M", "بن"),
+ (0xFC6E, "M", "بى"),
+ (0xFC6F, "M", "بي"),
+ (0xFC70, "M", "تر"),
+ (0xFC71, "M", "تز"),
+ (0xFC72, "M", "تم"),
+ (0xFC73, "M", "تن"),
+ (0xFC74, "M", "تى"),
+ (0xFC75, "M", "تي"),
+ (0xFC76, "M", "ثر"),
+ (0xFC77, "M", "ثز"),
+ (0xFC78, "M", "ثم"),
+ (0xFC79, "M", "ثن"),
+ (0xFC7A, "M", "ثى"),
+ (0xFC7B, "M", "ثي"),
+ (0xFC7C, "M", "فى"),
+ (0xFC7D, "M", "في"),
+ (0xFC7E, "M", "قى"),
+ (0xFC7F, "M", "قي"),
+ (0xFC80, "M", "كا"),
+ (0xFC81, "M", "كل"),
+ (0xFC82, "M", "كم"),
+ (0xFC83, "M", "كى"),
+ (0xFC84, "M", "كي"),
+ (0xFC85, "M", "لم"),
+ (0xFC86, "M", "لى"),
+ (0xFC87, "M", "لي"),
+ (0xFC88, "M", "ما"),
+ (0xFC89, "M", "مم"),
+ (0xFC8A, "M", "نر"),
+ (0xFC8B, "M", "نز"),
+ (0xFC8C, "M", "نم"),
+ (0xFC8D, "M", "نن"),
+ (0xFC8E, "M", "نى"),
+ (0xFC8F, "M", "ني"),
+ (0xFC90, "M", "ىٰ"),
+ (0xFC91, "M", "ير"),
+ (0xFC92, "M", "يز"),
+ (0xFC93, "M", "يم"),
+ (0xFC94, "M", "ين"),
+ (0xFC95, "M", "يى"),
+ (0xFC96, "M", "يي"),
+ (0xFC97, "M", "ئج"),
+ ]
+
+
+def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFC98, "M", "ئح"),
+ (0xFC99, "M", "ئخ"),
+ (0xFC9A, "M", "ئم"),
+ (0xFC9B, "M", "ئه"),
+ (0xFC9C, "M", "بج"),
+ (0xFC9D, "M", "بح"),
+ (0xFC9E, "M", "بخ"),
+ (0xFC9F, "M", "بم"),
+ (0xFCA0, "M", "به"),
+ (0xFCA1, "M", "تج"),
+ (0xFCA2, "M", "تح"),
+ (0xFCA3, "M", "تخ"),
+ (0xFCA4, "M", "تم"),
+ (0xFCA5, "M", "ته"),
+ (0xFCA6, "M", "ثم"),
+ (0xFCA7, "M", "جح"),
+ (0xFCA8, "M", "جم"),
+ (0xFCA9, "M", "حج"),
+ (0xFCAA, "M", "حم"),
+ (0xFCAB, "M", "خج"),
+ (0xFCAC, "M", "خم"),
+ (0xFCAD, "M", "سج"),
+ (0xFCAE, "M", "سح"),
+ (0xFCAF, "M", "سخ"),
+ (0xFCB0, "M", "سم"),
+ (0xFCB1, "M", "صح"),
+ (0xFCB2, "M", "صخ"),
+ (0xFCB3, "M", "صم"),
+ (0xFCB4, "M", "ضج"),
+ (0xFCB5, "M", "ضح"),
+ (0xFCB6, "M", "ضخ"),
+ (0xFCB7, "M", "ضم"),
+ (0xFCB8, "M", "طح"),
+ (0xFCB9, "M", "ظم"),
+ (0xFCBA, "M", "عج"),
+ (0xFCBB, "M", "عم"),
+ (0xFCBC, "M", "غج"),
+ (0xFCBD, "M", "غم"),
+ (0xFCBE, "M", "فج"),
+ (0xFCBF, "M", "فح"),
+ (0xFCC0, "M", "فخ"),
+ (0xFCC1, "M", "فم"),
+ (0xFCC2, "M", "قح"),
+ (0xFCC3, "M", "قم"),
+ (0xFCC4, "M", "كج"),
+ (0xFCC5, "M", "كح"),
+ (0xFCC6, "M", "كخ"),
+ (0xFCC7, "M", "كل"),
+ (0xFCC8, "M", "كم"),
+ (0xFCC9, "M", "لج"),
+ (0xFCCA, "M", "لح"),
+ (0xFCCB, "M", "لخ"),
+ (0xFCCC, "M", "لم"),
+ (0xFCCD, "M", "له"),
+ (0xFCCE, "M", "مج"),
+ (0xFCCF, "M", "مح"),
+ (0xFCD0, "M", "مخ"),
+ (0xFCD1, "M", "مم"),
+ (0xFCD2, "M", "نج"),
+ (0xFCD3, "M", "نح"),
+ (0xFCD4, "M", "نخ"),
+ (0xFCD5, "M", "نم"),
+ (0xFCD6, "M", "نه"),
+ (0xFCD7, "M", "هج"),
+ (0xFCD8, "M", "هم"),
+ (0xFCD9, "M", "هٰ"),
+ (0xFCDA, "M", "يج"),
+ (0xFCDB, "M", "يح"),
+ (0xFCDC, "M", "يخ"),
+ (0xFCDD, "M", "يم"),
+ (0xFCDE, "M", "يه"),
+ (0xFCDF, "M", "ئم"),
+ (0xFCE0, "M", "ئه"),
+ (0xFCE1, "M", "بم"),
+ (0xFCE2, "M", "به"),
+ (0xFCE3, "M", "تم"),
+ (0xFCE4, "M", "ته"),
+ (0xFCE5, "M", "ثم"),
+ (0xFCE6, "M", "ثه"),
+ (0xFCE7, "M", "سم"),
+ (0xFCE8, "M", "سه"),
+ (0xFCE9, "M", "شم"),
+ (0xFCEA, "M", "شه"),
+ (0xFCEB, "M", "كل"),
+ (0xFCEC, "M", "كم"),
+ (0xFCED, "M", "لم"),
+ (0xFCEE, "M", "نم"),
+ (0xFCEF, "M", "نه"),
+ (0xFCF0, "M", "يم"),
+ (0xFCF1, "M", "يه"),
+ (0xFCF2, "M", "ـَّ"),
+ (0xFCF3, "M", "ـُّ"),
+ (0xFCF4, "M", "ـِّ"),
+ (0xFCF5, "M", "طى"),
+ (0xFCF6, "M", "طي"),
+ (0xFCF7, "M", "عى"),
+ (0xFCF8, "M", "عي"),
+ (0xFCF9, "M", "غى"),
+ (0xFCFA, "M", "غي"),
+ (0xFCFB, "M", "سى"),
+ ]
+
+
+def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFCFC, "M", "سي"),
+ (0xFCFD, "M", "شى"),
+ (0xFCFE, "M", "شي"),
+ (0xFCFF, "M", "حى"),
+ (0xFD00, "M", "حي"),
+ (0xFD01, "M", "جى"),
+ (0xFD02, "M", "جي"),
+ (0xFD03, "M", "خى"),
+ (0xFD04, "M", "خي"),
+ (0xFD05, "M", "صى"),
+ (0xFD06, "M", "صي"),
+ (0xFD07, "M", "ضى"),
+ (0xFD08, "M", "ضي"),
+ (0xFD09, "M", "شج"),
+ (0xFD0A, "M", "شح"),
+ (0xFD0B, "M", "شخ"),
+ (0xFD0C, "M", "شم"),
+ (0xFD0D, "M", "شر"),
+ (0xFD0E, "M", "سر"),
+ (0xFD0F, "M", "صر"),
+ (0xFD10, "M", "ضر"),
+ (0xFD11, "M", "طى"),
+ (0xFD12, "M", "طي"),
+ (0xFD13, "M", "عى"),
+ (0xFD14, "M", "عي"),
+ (0xFD15, "M", "غى"),
+ (0xFD16, "M", "غي"),
+ (0xFD17, "M", "سى"),
+ (0xFD18, "M", "سي"),
+ (0xFD19, "M", "شى"),
+ (0xFD1A, "M", "شي"),
+ (0xFD1B, "M", "حى"),
+ (0xFD1C, "M", "حي"),
+ (0xFD1D, "M", "جى"),
+ (0xFD1E, "M", "جي"),
+ (0xFD1F, "M", "خى"),
+ (0xFD20, "M", "خي"),
+ (0xFD21, "M", "صى"),
+ (0xFD22, "M", "صي"),
+ (0xFD23, "M", "ضى"),
+ (0xFD24, "M", "ضي"),
+ (0xFD25, "M", "شج"),
+ (0xFD26, "M", "شح"),
+ (0xFD27, "M", "شخ"),
+ (0xFD28, "M", "شم"),
+ (0xFD29, "M", "شر"),
+ (0xFD2A, "M", "سر"),
+ (0xFD2B, "M", "صر"),
+ (0xFD2C, "M", "ضر"),
+ (0xFD2D, "M", "شج"),
+ (0xFD2E, "M", "شح"),
+ (0xFD2F, "M", "شخ"),
+ (0xFD30, "M", "شم"),
+ (0xFD31, "M", "سه"),
+ (0xFD32, "M", "شه"),
+ (0xFD33, "M", "طم"),
+ (0xFD34, "M", "سج"),
+ (0xFD35, "M", "سح"),
+ (0xFD36, "M", "سخ"),
+ (0xFD37, "M", "شج"),
+ (0xFD38, "M", "شح"),
+ (0xFD39, "M", "شخ"),
+ (0xFD3A, "M", "طم"),
+ (0xFD3B, "M", "ظم"),
+ (0xFD3C, "M", "اً"),
+ (0xFD3E, "V"),
+ (0xFD50, "M", "تجم"),
+ (0xFD51, "M", "تحج"),
+ (0xFD53, "M", "تحم"),
+ (0xFD54, "M", "تخم"),
+ (0xFD55, "M", "تمج"),
+ (0xFD56, "M", "تمح"),
+ (0xFD57, "M", "تمخ"),
+ (0xFD58, "M", "جمح"),
+ (0xFD5A, "M", "حمي"),
+ (0xFD5B, "M", "حمى"),
+ (0xFD5C, "M", "سحج"),
+ (0xFD5D, "M", "سجح"),
+ (0xFD5E, "M", "سجى"),
+ (0xFD5F, "M", "سمح"),
+ (0xFD61, "M", "سمج"),
+ (0xFD62, "M", "سمم"),
+ (0xFD64, "M", "صحح"),
+ (0xFD66, "M", "صمم"),
+ (0xFD67, "M", "شحم"),
+ (0xFD69, "M", "شجي"),
+ (0xFD6A, "M", "شمخ"),
+ (0xFD6C, "M", "شمم"),
+ (0xFD6E, "M", "ضحى"),
+ (0xFD6F, "M", "ضخم"),
+ (0xFD71, "M", "طمح"),
+ (0xFD73, "M", "طمم"),
+ (0xFD74, "M", "طمي"),
+ (0xFD75, "M", "عجم"),
+ (0xFD76, "M", "عمم"),
+ (0xFD78, "M", "عمى"),
+ (0xFD79, "M", "غمم"),
+ (0xFD7A, "M", "غمي"),
+ (0xFD7B, "M", "غمى"),
+ (0xFD7C, "M", "فخم"),
+ ]
+
+
+def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFD7E, "M", "قمح"),
+ (0xFD7F, "M", "قمم"),
+ (0xFD80, "M", "لحم"),
+ (0xFD81, "M", "لحي"),
+ (0xFD82, "M", "لحى"),
+ (0xFD83, "M", "لجج"),
+ (0xFD85, "M", "لخم"),
+ (0xFD87, "M", "لمح"),
+ (0xFD89, "M", "محج"),
+ (0xFD8A, "M", "محم"),
+ (0xFD8B, "M", "محي"),
+ (0xFD8C, "M", "مجح"),
+ (0xFD8D, "M", "مجم"),
+ (0xFD8E, "M", "مخج"),
+ (0xFD8F, "M", "مخم"),
+ (0xFD90, "X"),
+ (0xFD92, "M", "مجخ"),
+ (0xFD93, "M", "همج"),
+ (0xFD94, "M", "همم"),
+ (0xFD95, "M", "نحم"),
+ (0xFD96, "M", "نحى"),
+ (0xFD97, "M", "نجم"),
+ (0xFD99, "M", "نجى"),
+ (0xFD9A, "M", "نمي"),
+ (0xFD9B, "M", "نمى"),
+ (0xFD9C, "M", "يمم"),
+ (0xFD9E, "M", "بخي"),
+ (0xFD9F, "M", "تجي"),
+ (0xFDA0, "M", "تجى"),
+ (0xFDA1, "M", "تخي"),
+ (0xFDA2, "M", "تخى"),
+ (0xFDA3, "M", "تمي"),
+ (0xFDA4, "M", "تمى"),
+ (0xFDA5, "M", "جمي"),
+ (0xFDA6, "M", "جحى"),
+ (0xFDA7, "M", "جمى"),
+ (0xFDA8, "M", "سخى"),
+ (0xFDA9, "M", "صحي"),
+ (0xFDAA, "M", "شحي"),
+ (0xFDAB, "M", "ضحي"),
+ (0xFDAC, "M", "لجي"),
+ (0xFDAD, "M", "لمي"),
+ (0xFDAE, "M", "يحي"),
+ (0xFDAF, "M", "يجي"),
+ (0xFDB0, "M", "يمي"),
+ (0xFDB1, "M", "ممي"),
+ (0xFDB2, "M", "قمي"),
+ (0xFDB3, "M", "نحي"),
+ (0xFDB4, "M", "قمح"),
+ (0xFDB5, "M", "لحم"),
+ (0xFDB6, "M", "عمي"),
+ (0xFDB7, "M", "كمي"),
+ (0xFDB8, "M", "نجح"),
+ (0xFDB9, "M", "مخي"),
+ (0xFDBA, "M", "لجم"),
+ (0xFDBB, "M", "كمم"),
+ (0xFDBC, "M", "لجم"),
+ (0xFDBD, "M", "نجح"),
+ (0xFDBE, "M", "جحي"),
+ (0xFDBF, "M", "حجي"),
+ (0xFDC0, "M", "مجي"),
+ (0xFDC1, "M", "فمي"),
+ (0xFDC2, "M", "بحي"),
+ (0xFDC3, "M", "كمم"),
+ (0xFDC4, "M", "عجم"),
+ (0xFDC5, "M", "صمم"),
+ (0xFDC6, "M", "سخي"),
+ (0xFDC7, "M", "نجي"),
+ (0xFDC8, "X"),
+ (0xFDCF, "V"),
+ (0xFDD0, "X"),
+ (0xFDF0, "M", "صلے"),
+ (0xFDF1, "M", "قلے"),
+ (0xFDF2, "M", "الله"),
+ (0xFDF3, "M", "اكبر"),
+ (0xFDF4, "M", "محمد"),
+ (0xFDF5, "M", "صلعم"),
+ (0xFDF6, "M", "رسول"),
+ (0xFDF7, "M", "عليه"),
+ (0xFDF8, "M", "وسلم"),
+ (0xFDF9, "M", "صلى"),
+ (0xFDFA, "M", "صلى الله عليه وسلم"),
+ (0xFDFB, "M", "جل جلاله"),
+ (0xFDFC, "M", "ریال"),
+ (0xFDFD, "V"),
+ (0xFE00, "I"),
+ (0xFE10, "M", ","),
+ (0xFE11, "M", "、"),
+ (0xFE12, "X"),
+ (0xFE13, "M", ":"),
+ (0xFE14, "M", ";"),
+ (0xFE15, "M", "!"),
+ (0xFE16, "M", "?"),
+ (0xFE17, "M", "〖"),
+ (0xFE18, "M", "〗"),
+ (0xFE19, "X"),
+ (0xFE20, "V"),
+ (0xFE30, "X"),
+ (0xFE31, "M", "—"),
+ (0xFE32, "M", "–"),
+ ]
+
+
+def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFE33, "M", "_"),
+ (0xFE35, "M", "("),
+ (0xFE36, "M", ")"),
+ (0xFE37, "M", "{"),
+ (0xFE38, "M", "}"),
+ (0xFE39, "M", "〔"),
+ (0xFE3A, "M", "〕"),
+ (0xFE3B, "M", "【"),
+ (0xFE3C, "M", "】"),
+ (0xFE3D, "M", "《"),
+ (0xFE3E, "M", "》"),
+ (0xFE3F, "M", "〈"),
+ (0xFE40, "M", "〉"),
+ (0xFE41, "M", "「"),
+ (0xFE42, "M", "」"),
+ (0xFE43, "M", "『"),
+ (0xFE44, "M", "』"),
+ (0xFE45, "V"),
+ (0xFE47, "M", "["),
+ (0xFE48, "M", "]"),
+ (0xFE49, "M", " ̅"),
+ (0xFE4D, "M", "_"),
+ (0xFE50, "M", ","),
+ (0xFE51, "M", "、"),
+ (0xFE52, "X"),
+ (0xFE54, "M", ";"),
+ (0xFE55, "M", ":"),
+ (0xFE56, "M", "?"),
+ (0xFE57, "M", "!"),
+ (0xFE58, "M", "—"),
+ (0xFE59, "M", "("),
+ (0xFE5A, "M", ")"),
+ (0xFE5B, "M", "{"),
+ (0xFE5C, "M", "}"),
+ (0xFE5D, "M", "〔"),
+ (0xFE5E, "M", "〕"),
+ (0xFE5F, "M", "#"),
+ (0xFE60, "M", "&"),
+ (0xFE61, "M", "*"),
+ (0xFE62, "M", "+"),
+ (0xFE63, "M", "-"),
+ (0xFE64, "M", "<"),
+ (0xFE65, "M", ">"),
+ (0xFE66, "M", "="),
+ (0xFE67, "X"),
+ (0xFE68, "M", "\\"),
+ (0xFE69, "M", "$"),
+ (0xFE6A, "M", "%"),
+ (0xFE6B, "M", "@"),
+ (0xFE6C, "X"),
+ (0xFE70, "M", " ً"),
+ (0xFE71, "M", "ـً"),
+ (0xFE72, "M", " ٌ"),
+ (0xFE73, "V"),
+ (0xFE74, "M", " ٍ"),
+ (0xFE75, "X"),
+ (0xFE76, "M", " َ"),
+ (0xFE77, "M", "ـَ"),
+ (0xFE78, "M", " ُ"),
+ (0xFE79, "M", "ـُ"),
+ (0xFE7A, "M", " ِ"),
+ (0xFE7B, "M", "ـِ"),
+ (0xFE7C, "M", " ّ"),
+ (0xFE7D, "M", "ـّ"),
+ (0xFE7E, "M", " ْ"),
+ (0xFE7F, "M", "ـْ"),
+ (0xFE80, "M", "ء"),
+ (0xFE81, "M", "آ"),
+ (0xFE83, "M", "أ"),
+ (0xFE85, "M", "ؤ"),
+ (0xFE87, "M", "إ"),
+ (0xFE89, "M", "ئ"),
+ (0xFE8D, "M", "ا"),
+ (0xFE8F, "M", "ب"),
+ (0xFE93, "M", "ة"),
+ (0xFE95, "M", "ت"),
+ (0xFE99, "M", "ث"),
+ (0xFE9D, "M", "ج"),
+ (0xFEA1, "M", "ح"),
+ (0xFEA5, "M", "خ"),
+ (0xFEA9, "M", "د"),
+ (0xFEAB, "M", "ذ"),
+ (0xFEAD, "M", "ر"),
+ (0xFEAF, "M", "ز"),
+ (0xFEB1, "M", "س"),
+ (0xFEB5, "M", "ش"),
+ (0xFEB9, "M", "ص"),
+ (0xFEBD, "M", "ض"),
+ (0xFEC1, "M", "ط"),
+ (0xFEC5, "M", "ظ"),
+ (0xFEC9, "M", "ع"),
+ (0xFECD, "M", "غ"),
+ (0xFED1, "M", "ف"),
+ (0xFED5, "M", "ق"),
+ (0xFED9, "M", "ك"),
+ (0xFEDD, "M", "ل"),
+ (0xFEE1, "M", "م"),
+ (0xFEE5, "M", "ن"),
+ (0xFEE9, "M", "ه"),
+ (0xFEED, "M", "و"),
+ ]
+
+
+def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFEEF, "M", "ى"),
+ (0xFEF1, "M", "ي"),
+ (0xFEF5, "M", "لآ"),
+ (0xFEF7, "M", "لأ"),
+ (0xFEF9, "M", "لإ"),
+ (0xFEFB, "M", "لا"),
+ (0xFEFD, "X"),
+ (0xFEFF, "I"),
+ (0xFF00, "X"),
+ (0xFF01, "M", "!"),
+ (0xFF02, "M", '"'),
+ (0xFF03, "M", "#"),
+ (0xFF04, "M", "$"),
+ (0xFF05, "M", "%"),
+ (0xFF06, "M", "&"),
+ (0xFF07, "M", "'"),
+ (0xFF08, "M", "("),
+ (0xFF09, "M", ")"),
+ (0xFF0A, "M", "*"),
+ (0xFF0B, "M", "+"),
+ (0xFF0C, "M", ","),
+ (0xFF0D, "M", "-"),
+ (0xFF0E, "M", "."),
+ (0xFF0F, "M", "/"),
+ (0xFF10, "M", "0"),
+ (0xFF11, "M", "1"),
+ (0xFF12, "M", "2"),
+ (0xFF13, "M", "3"),
+ (0xFF14, "M", "4"),
+ (0xFF15, "M", "5"),
+ (0xFF16, "M", "6"),
+ (0xFF17, "M", "7"),
+ (0xFF18, "M", "8"),
+ (0xFF19, "M", "9"),
+ (0xFF1A, "M", ":"),
+ (0xFF1B, "M", ";"),
+ (0xFF1C, "M", "<"),
+ (0xFF1D, "M", "="),
+ (0xFF1E, "M", ">"),
+ (0xFF1F, "M", "?"),
+ (0xFF20, "M", "@"),
+ (0xFF21, "M", "a"),
+ (0xFF22, "M", "b"),
+ (0xFF23, "M", "c"),
+ (0xFF24, "M", "d"),
+ (0xFF25, "M", "e"),
+ (0xFF26, "M", "f"),
+ (0xFF27, "M", "g"),
+ (0xFF28, "M", "h"),
+ (0xFF29, "M", "i"),
+ (0xFF2A, "M", "j"),
+ (0xFF2B, "M", "k"),
+ (0xFF2C, "M", "l"),
+ (0xFF2D, "M", "m"),
+ (0xFF2E, "M", "n"),
+ (0xFF2F, "M", "o"),
+ (0xFF30, "M", "p"),
+ (0xFF31, "M", "q"),
+ (0xFF32, "M", "r"),
+ (0xFF33, "M", "s"),
+ (0xFF34, "M", "t"),
+ (0xFF35, "M", "u"),
+ (0xFF36, "M", "v"),
+ (0xFF37, "M", "w"),
+ (0xFF38, "M", "x"),
+ (0xFF39, "M", "y"),
+ (0xFF3A, "M", "z"),
+ (0xFF3B, "M", "["),
+ (0xFF3C, "M", "\\"),
+ (0xFF3D, "M", "]"),
+ (0xFF3E, "M", "^"),
+ (0xFF3F, "M", "_"),
+ (0xFF40, "M", "`"),
+ (0xFF41, "M", "a"),
+ (0xFF42, "M", "b"),
+ (0xFF43, "M", "c"),
+ (0xFF44, "M", "d"),
+ (0xFF45, "M", "e"),
+ (0xFF46, "M", "f"),
+ (0xFF47, "M", "g"),
+ (0xFF48, "M", "h"),
+ (0xFF49, "M", "i"),
+ (0xFF4A, "M", "j"),
+ (0xFF4B, "M", "k"),
+ (0xFF4C, "M", "l"),
+ (0xFF4D, "M", "m"),
+ (0xFF4E, "M", "n"),
+ (0xFF4F, "M", "o"),
+ (0xFF50, "M", "p"),
+ (0xFF51, "M", "q"),
+ (0xFF52, "M", "r"),
+ (0xFF53, "M", "s"),
+ (0xFF54, "M", "t"),
+ (0xFF55, "M", "u"),
+ (0xFF56, "M", "v"),
+ (0xFF57, "M", "w"),
+ (0xFF58, "M", "x"),
+ (0xFF59, "M", "y"),
+ (0xFF5A, "M", "z"),
+ (0xFF5B, "M", "{"),
+ ]
+
+
+def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFF5C, "M", "|"),
+ (0xFF5D, "M", "}"),
+ (0xFF5E, "M", "~"),
+ (0xFF5F, "M", "⦅"),
+ (0xFF60, "M", "⦆"),
+ (0xFF61, "M", "."),
+ (0xFF62, "M", "「"),
+ (0xFF63, "M", "」"),
+ (0xFF64, "M", "、"),
+ (0xFF65, "M", "・"),
+ (0xFF66, "M", "ヲ"),
+ (0xFF67, "M", "ァ"),
+ (0xFF68, "M", "ィ"),
+ (0xFF69, "M", "ゥ"),
+ (0xFF6A, "M", "ェ"),
+ (0xFF6B, "M", "ォ"),
+ (0xFF6C, "M", "ャ"),
+ (0xFF6D, "M", "ュ"),
+ (0xFF6E, "M", "ョ"),
+ (0xFF6F, "M", "ッ"),
+ (0xFF70, "M", "ー"),
+ (0xFF71, "M", "ア"),
+ (0xFF72, "M", "イ"),
+ (0xFF73, "M", "ウ"),
+ (0xFF74, "M", "エ"),
+ (0xFF75, "M", "オ"),
+ (0xFF76, "M", "カ"),
+ (0xFF77, "M", "キ"),
+ (0xFF78, "M", "ク"),
+ (0xFF79, "M", "ケ"),
+ (0xFF7A, "M", "コ"),
+ (0xFF7B, "M", "サ"),
+ (0xFF7C, "M", "シ"),
+ (0xFF7D, "M", "ス"),
+ (0xFF7E, "M", "セ"),
+ (0xFF7F, "M", "ソ"),
+ (0xFF80, "M", "タ"),
+ (0xFF81, "M", "チ"),
+ (0xFF82, "M", "ツ"),
+ (0xFF83, "M", "テ"),
+ (0xFF84, "M", "ト"),
+ (0xFF85, "M", "ナ"),
+ (0xFF86, "M", "ニ"),
+ (0xFF87, "M", "ヌ"),
+ (0xFF88, "M", "ネ"),
+ (0xFF89, "M", "ノ"),
+ (0xFF8A, "M", "ハ"),
+ (0xFF8B, "M", "ヒ"),
+ (0xFF8C, "M", "フ"),
+ (0xFF8D, "M", "ヘ"),
+ (0xFF8E, "M", "ホ"),
+ (0xFF8F, "M", "マ"),
+ (0xFF90, "M", "ミ"),
+ (0xFF91, "M", "ム"),
+ (0xFF92, "M", "メ"),
+ (0xFF93, "M", "モ"),
+ (0xFF94, "M", "ヤ"),
+ (0xFF95, "M", "ユ"),
+ (0xFF96, "M", "ヨ"),
+ (0xFF97, "M", "ラ"),
+ (0xFF98, "M", "リ"),
+ (0xFF99, "M", "ル"),
+ (0xFF9A, "M", "レ"),
+ (0xFF9B, "M", "ロ"),
+ (0xFF9C, "M", "ワ"),
+ (0xFF9D, "M", "ン"),
+ (0xFF9E, "M", "゙"),
+ (0xFF9F, "M", "゚"),
+ (0xFFA0, "I"),
+ (0xFFA1, "M", "ᄀ"),
+ (0xFFA2, "M", "ᄁ"),
+ (0xFFA3, "M", "ᆪ"),
+ (0xFFA4, "M", "ᄂ"),
+ (0xFFA5, "M", "ᆬ"),
+ (0xFFA6, "M", "ᆭ"),
+ (0xFFA7, "M", "ᄃ"),
+ (0xFFA8, "M", "ᄄ"),
+ (0xFFA9, "M", "ᄅ"),
+ (0xFFAA, "M", "ᆰ"),
+ (0xFFAB, "M", "ᆱ"),
+ (0xFFAC, "M", "ᆲ"),
+ (0xFFAD, "M", "ᆳ"),
+ (0xFFAE, "M", "ᆴ"),
+ (0xFFAF, "M", "ᆵ"),
+ (0xFFB0, "M", "ᄚ"),
+ (0xFFB1, "M", "ᄆ"),
+ (0xFFB2, "M", "ᄇ"),
+ (0xFFB3, "M", "ᄈ"),
+ (0xFFB4, "M", "ᄡ"),
+ (0xFFB5, "M", "ᄉ"),
+ (0xFFB6, "M", "ᄊ"),
+ (0xFFB7, "M", "ᄋ"),
+ (0xFFB8, "M", "ᄌ"),
+ (0xFFB9, "M", "ᄍ"),
+ (0xFFBA, "M", "ᄎ"),
+ (0xFFBB, "M", "ᄏ"),
+ (0xFFBC, "M", "ᄐ"),
+ (0xFFBD, "M", "ᄑ"),
+ (0xFFBE, "M", "ᄒ"),
+ (0xFFBF, "X"),
+ ]
+
+
+def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFFC2, "M", "ᅡ"),
+ (0xFFC3, "M", "ᅢ"),
+ (0xFFC4, "M", "ᅣ"),
+ (0xFFC5, "M", "ᅤ"),
+ (0xFFC6, "M", "ᅥ"),
+ (0xFFC7, "M", "ᅦ"),
+ (0xFFC8, "X"),
+ (0xFFCA, "M", "ᅧ"),
+ (0xFFCB, "M", "ᅨ"),
+ (0xFFCC, "M", "ᅩ"),
+ (0xFFCD, "M", "ᅪ"),
+ (0xFFCE, "M", "ᅫ"),
+ (0xFFCF, "M", "ᅬ"),
+ (0xFFD0, "X"),
+ (0xFFD2, "M", "ᅭ"),
+ (0xFFD3, "M", "ᅮ"),
+ (0xFFD4, "M", "ᅯ"),
+ (0xFFD5, "M", "ᅰ"),
+ (0xFFD6, "M", "ᅱ"),
+ (0xFFD7, "M", "ᅲ"),
+ (0xFFD8, "X"),
+ (0xFFDA, "M", "ᅳ"),
+ (0xFFDB, "M", "ᅴ"),
+ (0xFFDC, "M", "ᅵ"),
+ (0xFFDD, "X"),
+ (0xFFE0, "M", "¢"),
+ (0xFFE1, "M", "£"),
+ (0xFFE2, "M", "¬"),
+ (0xFFE3, "M", " ̄"),
+ (0xFFE4, "M", "¦"),
+ (0xFFE5, "M", "¥"),
+ (0xFFE6, "M", "₩"),
+ (0xFFE7, "X"),
+ (0xFFE8, "M", "│"),
+ (0xFFE9, "M", "←"),
+ (0xFFEA, "M", "↑"),
+ (0xFFEB, "M", "→"),
+ (0xFFEC, "M", "↓"),
+ (0xFFED, "M", "■"),
+ (0xFFEE, "M", "○"),
+ (0xFFEF, "X"),
+ (0x10000, "V"),
+ (0x1000C, "X"),
+ (0x1000D, "V"),
+ (0x10027, "X"),
+ (0x10028, "V"),
+ (0x1003B, "X"),
+ (0x1003C, "V"),
+ (0x1003E, "X"),
+ (0x1003F, "V"),
+ (0x1004E, "X"),
+ (0x10050, "V"),
+ (0x1005E, "X"),
+ (0x10080, "V"),
+ (0x100FB, "X"),
+ (0x10100, "V"),
+ (0x10103, "X"),
+ (0x10107, "V"),
+ (0x10134, "X"),
+ (0x10137, "V"),
+ (0x1018F, "X"),
+ (0x10190, "V"),
+ (0x1019D, "X"),
+ (0x101A0, "V"),
+ (0x101A1, "X"),
+ (0x101D0, "V"),
+ (0x101FE, "X"),
+ (0x10280, "V"),
+ (0x1029D, "X"),
+ (0x102A0, "V"),
+ (0x102D1, "X"),
+ (0x102E0, "V"),
+ (0x102FC, "X"),
+ (0x10300, "V"),
+ (0x10324, "X"),
+ (0x1032D, "V"),
+ (0x1034B, "X"),
+ (0x10350, "V"),
+ (0x1037B, "X"),
+ (0x10380, "V"),
+ (0x1039E, "X"),
+ (0x1039F, "V"),
+ (0x103C4, "X"),
+ (0x103C8, "V"),
+ (0x103D6, "X"),
+ (0x10400, "M", "𐐨"),
+ (0x10401, "M", "𐐩"),
+ (0x10402, "M", "𐐪"),
+ (0x10403, "M", "𐐫"),
+ (0x10404, "M", "𐐬"),
+ (0x10405, "M", "𐐭"),
+ (0x10406, "M", "𐐮"),
+ (0x10407, "M", "𐐯"),
+ (0x10408, "M", "𐐰"),
+ (0x10409, "M", "𐐱"),
+ (0x1040A, "M", "𐐲"),
+ (0x1040B, "M", "𐐳"),
+ (0x1040C, "M", "𐐴"),
+ (0x1040D, "M", "𐐵"),
+ (0x1040E, "M", "𐐶"),
+ ]
+
+
+def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1040F, "M", "𐐷"),
+ (0x10410, "M", "𐐸"),
+ (0x10411, "M", "𐐹"),
+ (0x10412, "M", "𐐺"),
+ (0x10413, "M", "𐐻"),
+ (0x10414, "M", "𐐼"),
+ (0x10415, "M", "𐐽"),
+ (0x10416, "M", "𐐾"),
+ (0x10417, "M", "𐐿"),
+ (0x10418, "M", "𐑀"),
+ (0x10419, "M", "𐑁"),
+ (0x1041A, "M", "𐑂"),
+ (0x1041B, "M", "𐑃"),
+ (0x1041C, "M", "𐑄"),
+ (0x1041D, "M", "𐑅"),
+ (0x1041E, "M", "𐑆"),
+ (0x1041F, "M", "𐑇"),
+ (0x10420, "M", "𐑈"),
+ (0x10421, "M", "𐑉"),
+ (0x10422, "M", "𐑊"),
+ (0x10423, "M", "𐑋"),
+ (0x10424, "M", "𐑌"),
+ (0x10425, "M", "𐑍"),
+ (0x10426, "M", "𐑎"),
+ (0x10427, "M", "𐑏"),
+ (0x10428, "V"),
+ (0x1049E, "X"),
+ (0x104A0, "V"),
+ (0x104AA, "X"),
+ (0x104B0, "M", "𐓘"),
+ (0x104B1, "M", "𐓙"),
+ (0x104B2, "M", "𐓚"),
+ (0x104B3, "M", "𐓛"),
+ (0x104B4, "M", "𐓜"),
+ (0x104B5, "M", "𐓝"),
+ (0x104B6, "M", "𐓞"),
+ (0x104B7, "M", "𐓟"),
+ (0x104B8, "M", "𐓠"),
+ (0x104B9, "M", "𐓡"),
+ (0x104BA, "M", "𐓢"),
+ (0x104BB, "M", "𐓣"),
+ (0x104BC, "M", "𐓤"),
+ (0x104BD, "M", "𐓥"),
+ (0x104BE, "M", "𐓦"),
+ (0x104BF, "M", "𐓧"),
+ (0x104C0, "M", "𐓨"),
+ (0x104C1, "M", "𐓩"),
+ (0x104C2, "M", "𐓪"),
+ (0x104C3, "M", "𐓫"),
+ (0x104C4, "M", "𐓬"),
+ (0x104C5, "M", "𐓭"),
+ (0x104C6, "M", "𐓮"),
+ (0x104C7, "M", "𐓯"),
+ (0x104C8, "M", "𐓰"),
+ (0x104C9, "M", "𐓱"),
+ (0x104CA, "M", "𐓲"),
+ (0x104CB, "M", "𐓳"),
+ (0x104CC, "M", "𐓴"),
+ (0x104CD, "M", "𐓵"),
+ (0x104CE, "M", "𐓶"),
+ (0x104CF, "M", "𐓷"),
+ (0x104D0, "M", "𐓸"),
+ (0x104D1, "M", "𐓹"),
+ (0x104D2, "M", "𐓺"),
+ (0x104D3, "M", "𐓻"),
+ (0x104D4, "X"),
+ (0x104D8, "V"),
+ (0x104FC, "X"),
+ (0x10500, "V"),
+ (0x10528, "X"),
+ (0x10530, "V"),
+ (0x10564, "X"),
+ (0x1056F, "V"),
+ (0x10570, "M", "𐖗"),
+ (0x10571, "M", "𐖘"),
+ (0x10572, "M", "𐖙"),
+ (0x10573, "M", "𐖚"),
+ (0x10574, "M", "𐖛"),
+ (0x10575, "M", "𐖜"),
+ (0x10576, "M", "𐖝"),
+ (0x10577, "M", "𐖞"),
+ (0x10578, "M", "𐖟"),
+ (0x10579, "M", "𐖠"),
+ (0x1057A, "M", "𐖡"),
+ (0x1057B, "X"),
+ (0x1057C, "M", "𐖣"),
+ (0x1057D, "M", "𐖤"),
+ (0x1057E, "M", "𐖥"),
+ (0x1057F, "M", "𐖦"),
+ (0x10580, "M", "𐖧"),
+ (0x10581, "M", "𐖨"),
+ (0x10582, "M", "𐖩"),
+ (0x10583, "M", "𐖪"),
+ (0x10584, "M", "𐖫"),
+ (0x10585, "M", "𐖬"),
+ (0x10586, "M", "𐖭"),
+ (0x10587, "M", "𐖮"),
+ (0x10588, "M", "𐖯"),
+ (0x10589, "M", "𐖰"),
+ (0x1058A, "M", "𐖱"),
+ ]
+
+
+def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1058B, "X"),
+ (0x1058C, "M", "𐖳"),
+ (0x1058D, "M", "𐖴"),
+ (0x1058E, "M", "𐖵"),
+ (0x1058F, "M", "𐖶"),
+ (0x10590, "M", "𐖷"),
+ (0x10591, "M", "𐖸"),
+ (0x10592, "M", "𐖹"),
+ (0x10593, "X"),
+ (0x10594, "M", "𐖻"),
+ (0x10595, "M", "𐖼"),
+ (0x10596, "X"),
+ (0x10597, "V"),
+ (0x105A2, "X"),
+ (0x105A3, "V"),
+ (0x105B2, "X"),
+ (0x105B3, "V"),
+ (0x105BA, "X"),
+ (0x105BB, "V"),
+ (0x105BD, "X"),
+ (0x105C0, "V"),
+ (0x105F4, "X"),
+ (0x10600, "V"),
+ (0x10737, "X"),
+ (0x10740, "V"),
+ (0x10756, "X"),
+ (0x10760, "V"),
+ (0x10768, "X"),
+ (0x10780, "V"),
+ (0x10781, "M", "ː"),
+ (0x10782, "M", "ˑ"),
+ (0x10783, "M", "æ"),
+ (0x10784, "M", "ʙ"),
+ (0x10785, "M", "ɓ"),
+ (0x10786, "X"),
+ (0x10787, "M", "ʣ"),
+ (0x10788, "M", "ꭦ"),
+ (0x10789, "M", "ʥ"),
+ (0x1078A, "M", "ʤ"),
+ (0x1078B, "M", "ɖ"),
+ (0x1078C, "M", "ɗ"),
+ (0x1078D, "M", "ᶑ"),
+ (0x1078E, "M", "ɘ"),
+ (0x1078F, "M", "ɞ"),
+ (0x10790, "M", "ʩ"),
+ (0x10791, "M", "ɤ"),
+ (0x10792, "M", "ɢ"),
+ (0x10793, "M", "ɠ"),
+ (0x10794, "M", "ʛ"),
+ (0x10795, "M", "ħ"),
+ (0x10796, "M", "ʜ"),
+ (0x10797, "M", "ɧ"),
+ (0x10798, "M", "ʄ"),
+ (0x10799, "M", "ʪ"),
+ (0x1079A, "M", "ʫ"),
+ (0x1079B, "M", "ɬ"),
+ (0x1079C, "M", "𝼄"),
+ (0x1079D, "M", "ꞎ"),
+ (0x1079E, "M", "ɮ"),
+ (0x1079F, "M", "𝼅"),
+ (0x107A0, "M", "ʎ"),
+ (0x107A1, "M", "𝼆"),
+ (0x107A2, "M", "ø"),
+ (0x107A3, "M", "ɶ"),
+ (0x107A4, "M", "ɷ"),
+ (0x107A5, "M", "q"),
+ (0x107A6, "M", "ɺ"),
+ (0x107A7, "M", "𝼈"),
+ (0x107A8, "M", "ɽ"),
+ (0x107A9, "M", "ɾ"),
+ (0x107AA, "M", "ʀ"),
+ (0x107AB, "M", "ʨ"),
+ (0x107AC, "M", "ʦ"),
+ (0x107AD, "M", "ꭧ"),
+ (0x107AE, "M", "ʧ"),
+ (0x107AF, "M", "ʈ"),
+ (0x107B0, "M", "ⱱ"),
+ (0x107B1, "X"),
+ (0x107B2, "M", "ʏ"),
+ (0x107B3, "M", "ʡ"),
+ (0x107B4, "M", "ʢ"),
+ (0x107B5, "M", "ʘ"),
+ (0x107B6, "M", "ǀ"),
+ (0x107B7, "M", "ǁ"),
+ (0x107B8, "M", "ǂ"),
+ (0x107B9, "M", "𝼊"),
+ (0x107BA, "M", "𝼞"),
+ (0x107BB, "X"),
+ (0x10800, "V"),
+ (0x10806, "X"),
+ (0x10808, "V"),
+ (0x10809, "X"),
+ (0x1080A, "V"),
+ (0x10836, "X"),
+ (0x10837, "V"),
+ (0x10839, "X"),
+ (0x1083C, "V"),
+ (0x1083D, "X"),
+ (0x1083F, "V"),
+ (0x10856, "X"),
+ ]
+
+
+def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x10857, "V"),
+ (0x1089F, "X"),
+ (0x108A7, "V"),
+ (0x108B0, "X"),
+ (0x108E0, "V"),
+ (0x108F3, "X"),
+ (0x108F4, "V"),
+ (0x108F6, "X"),
+ (0x108FB, "V"),
+ (0x1091C, "X"),
+ (0x1091F, "V"),
+ (0x1093A, "X"),
+ (0x1093F, "V"),
+ (0x10940, "X"),
+ (0x10980, "V"),
+ (0x109B8, "X"),
+ (0x109BC, "V"),
+ (0x109D0, "X"),
+ (0x109D2, "V"),
+ (0x10A04, "X"),
+ (0x10A05, "V"),
+ (0x10A07, "X"),
+ (0x10A0C, "V"),
+ (0x10A14, "X"),
+ (0x10A15, "V"),
+ (0x10A18, "X"),
+ (0x10A19, "V"),
+ (0x10A36, "X"),
+ (0x10A38, "V"),
+ (0x10A3B, "X"),
+ (0x10A3F, "V"),
+ (0x10A49, "X"),
+ (0x10A50, "V"),
+ (0x10A59, "X"),
+ (0x10A60, "V"),
+ (0x10AA0, "X"),
+ (0x10AC0, "V"),
+ (0x10AE7, "X"),
+ (0x10AEB, "V"),
+ (0x10AF7, "X"),
+ (0x10B00, "V"),
+ (0x10B36, "X"),
+ (0x10B39, "V"),
+ (0x10B56, "X"),
+ (0x10B58, "V"),
+ (0x10B73, "X"),
+ (0x10B78, "V"),
+ (0x10B92, "X"),
+ (0x10B99, "V"),
+ (0x10B9D, "X"),
+ (0x10BA9, "V"),
+ (0x10BB0, "X"),
+ (0x10C00, "V"),
+ (0x10C49, "X"),
+ (0x10C80, "M", "𐳀"),
+ (0x10C81, "M", "𐳁"),
+ (0x10C82, "M", "𐳂"),
+ (0x10C83, "M", "𐳃"),
+ (0x10C84, "M", "𐳄"),
+ (0x10C85, "M", "𐳅"),
+ (0x10C86, "M", "𐳆"),
+ (0x10C87, "M", "𐳇"),
+ (0x10C88, "M", "𐳈"),
+ (0x10C89, "M", "𐳉"),
+ (0x10C8A, "M", "𐳊"),
+ (0x10C8B, "M", "𐳋"),
+ (0x10C8C, "M", "𐳌"),
+ (0x10C8D, "M", "𐳍"),
+ (0x10C8E, "M", "𐳎"),
+ (0x10C8F, "M", "𐳏"),
+ (0x10C90, "M", "𐳐"),
+ (0x10C91, "M", "𐳑"),
+ (0x10C92, "M", "𐳒"),
+ (0x10C93, "M", "𐳓"),
+ (0x10C94, "M", "𐳔"),
+ (0x10C95, "M", "𐳕"),
+ (0x10C96, "M", "𐳖"),
+ (0x10C97, "M", "𐳗"),
+ (0x10C98, "M", "𐳘"),
+ (0x10C99, "M", "𐳙"),
+ (0x10C9A, "M", "𐳚"),
+ (0x10C9B, "M", "𐳛"),
+ (0x10C9C, "M", "𐳜"),
+ (0x10C9D, "M", "𐳝"),
+ (0x10C9E, "M", "𐳞"),
+ (0x10C9F, "M", "𐳟"),
+ (0x10CA0, "M", "𐳠"),
+ (0x10CA1, "M", "𐳡"),
+ (0x10CA2, "M", "𐳢"),
+ (0x10CA3, "M", "𐳣"),
+ (0x10CA4, "M", "𐳤"),
+ (0x10CA5, "M", "𐳥"),
+ (0x10CA6, "M", "𐳦"),
+ (0x10CA7, "M", "𐳧"),
+ (0x10CA8, "M", "𐳨"),
+ (0x10CA9, "M", "𐳩"),
+ (0x10CAA, "M", "𐳪"),
+ (0x10CAB, "M", "𐳫"),
+ (0x10CAC, "M", "𐳬"),
+ (0x10CAD, "M", "𐳭"),
+ ]
+
+
+def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x10CAE, "M", "𐳮"),
+ (0x10CAF, "M", "𐳯"),
+ (0x10CB0, "M", "𐳰"),
+ (0x10CB1, "M", "𐳱"),
+ (0x10CB2, "M", "𐳲"),
+ (0x10CB3, "X"),
+ (0x10CC0, "V"),
+ (0x10CF3, "X"),
+ (0x10CFA, "V"),
+ (0x10D28, "X"),
+ (0x10D30, "V"),
+ (0x10D3A, "X"),
+ (0x10D40, "V"),
+ (0x10D50, "M", ""),
+ (0x10D51, "M", ""),
+ (0x10D52, "M", ""),
+ (0x10D53, "M", ""),
+ (0x10D54, "M", ""),
+ (0x10D55, "M", ""),
+ (0x10D56, "M", ""),
+ (0x10D57, "M", ""),
+ (0x10D58, "M", ""),
+ (0x10D59, "M", ""),
+ (0x10D5A, "M", ""),
+ (0x10D5B, "M", ""),
+ (0x10D5C, "M", ""),
+ (0x10D5D, "M", ""),
+ (0x10D5E, "M", ""),
+ (0x10D5F, "M", ""),
+ (0x10D60, "M", ""),
+ (0x10D61, "M", ""),
+ (0x10D62, "M", ""),
+ (0x10D63, "M", ""),
+ (0x10D64, "M", ""),
+ (0x10D65, "M", ""),
+ (0x10D66, "X"),
+ (0x10D69, "V"),
+ (0x10D86, "X"),
+ (0x10D8E, "V"),
+ (0x10D90, "X"),
+ (0x10E60, "V"),
+ (0x10E7F, "X"),
+ (0x10E80, "V"),
+ (0x10EAA, "X"),
+ (0x10EAB, "V"),
+ (0x10EAE, "X"),
+ (0x10EB0, "V"),
+ (0x10EB2, "X"),
+ (0x10EC2, "V"),
+ (0x10EC5, "X"),
+ (0x10EFC, "V"),
+ (0x10F28, "X"),
+ (0x10F30, "V"),
+ (0x10F5A, "X"),
+ (0x10F70, "V"),
+ (0x10F8A, "X"),
+ (0x10FB0, "V"),
+ (0x10FCC, "X"),
+ (0x10FE0, "V"),
+ (0x10FF7, "X"),
+ (0x11000, "V"),
+ (0x1104E, "X"),
+ (0x11052, "V"),
+ (0x11076, "X"),
+ (0x1107F, "V"),
+ (0x110BD, "X"),
+ (0x110BE, "V"),
+ (0x110C3, "X"),
+ (0x110D0, "V"),
+ (0x110E9, "X"),
+ (0x110F0, "V"),
+ (0x110FA, "X"),
+ (0x11100, "V"),
+ (0x11135, "X"),
+ (0x11136, "V"),
+ (0x11148, "X"),
+ (0x11150, "V"),
+ (0x11177, "X"),
+ (0x11180, "V"),
+ (0x111E0, "X"),
+ (0x111E1, "V"),
+ (0x111F5, "X"),
+ (0x11200, "V"),
+ (0x11212, "X"),
+ (0x11213, "V"),
+ (0x11242, "X"),
+ (0x11280, "V"),
+ (0x11287, "X"),
+ (0x11288, "V"),
+ (0x11289, "X"),
+ (0x1128A, "V"),
+ (0x1128E, "X"),
+ (0x1128F, "V"),
+ (0x1129E, "X"),
+ (0x1129F, "V"),
+ (0x112AA, "X"),
+ (0x112B0, "V"),
+ (0x112EB, "X"),
+ (0x112F0, "V"),
+ (0x112FA, "X"),
+ ]
+
+
+def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x11300, "V"),
+ (0x11304, "X"),
+ (0x11305, "V"),
+ (0x1130D, "X"),
+ (0x1130F, "V"),
+ (0x11311, "X"),
+ (0x11313, "V"),
+ (0x11329, "X"),
+ (0x1132A, "V"),
+ (0x11331, "X"),
+ (0x11332, "V"),
+ (0x11334, "X"),
+ (0x11335, "V"),
+ (0x1133A, "X"),
+ (0x1133B, "V"),
+ (0x11345, "X"),
+ (0x11347, "V"),
+ (0x11349, "X"),
+ (0x1134B, "V"),
+ (0x1134E, "X"),
+ (0x11350, "V"),
+ (0x11351, "X"),
+ (0x11357, "V"),
+ (0x11358, "X"),
+ (0x1135D, "V"),
+ (0x11364, "X"),
+ (0x11366, "V"),
+ (0x1136D, "X"),
+ (0x11370, "V"),
+ (0x11375, "X"),
+ (0x11380, "V"),
+ (0x1138A, "X"),
+ (0x1138B, "V"),
+ (0x1138C, "X"),
+ (0x1138E, "V"),
+ (0x1138F, "X"),
+ (0x11390, "V"),
+ (0x113B6, "X"),
+ (0x113B7, "V"),
+ (0x113C1, "X"),
+ (0x113C2, "V"),
+ (0x113C3, "X"),
+ (0x113C5, "V"),
+ (0x113C6, "X"),
+ (0x113C7, "V"),
+ (0x113CB, "X"),
+ (0x113CC, "V"),
+ (0x113D6, "X"),
+ (0x113D7, "V"),
+ (0x113D9, "X"),
+ (0x113E1, "V"),
+ (0x113E3, "X"),
+ (0x11400, "V"),
+ (0x1145C, "X"),
+ (0x1145D, "V"),
+ (0x11462, "X"),
+ (0x11480, "V"),
+ (0x114C8, "X"),
+ (0x114D0, "V"),
+ (0x114DA, "X"),
+ (0x11580, "V"),
+ (0x115B6, "X"),
+ (0x115B8, "V"),
+ (0x115DE, "X"),
+ (0x11600, "V"),
+ (0x11645, "X"),
+ (0x11650, "V"),
+ (0x1165A, "X"),
+ (0x11660, "V"),
+ (0x1166D, "X"),
+ (0x11680, "V"),
+ (0x116BA, "X"),
+ (0x116C0, "V"),
+ (0x116CA, "X"),
+ (0x116D0, "V"),
+ (0x116E4, "X"),
+ (0x11700, "V"),
+ (0x1171B, "X"),
+ (0x1171D, "V"),
+ (0x1172C, "X"),
+ (0x11730, "V"),
+ (0x11747, "X"),
+ (0x11800, "V"),
+ (0x1183C, "X"),
+ (0x118A0, "M", "𑣀"),
+ (0x118A1, "M", "𑣁"),
+ (0x118A2, "M", "𑣂"),
+ (0x118A3, "M", "𑣃"),
+ (0x118A4, "M", "𑣄"),
+ (0x118A5, "M", "𑣅"),
+ (0x118A6, "M", "𑣆"),
+ (0x118A7, "M", "𑣇"),
+ (0x118A8, "M", "𑣈"),
+ (0x118A9, "M", "𑣉"),
+ (0x118AA, "M", "𑣊"),
+ (0x118AB, "M", "𑣋"),
+ (0x118AC, "M", "𑣌"),
+ (0x118AD, "M", "𑣍"),
+ (0x118AE, "M", "𑣎"),
+ (0x118AF, "M", "𑣏"),
+ ]
+
+
+def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x118B0, "M", "𑣐"),
+ (0x118B1, "M", "𑣑"),
+ (0x118B2, "M", "𑣒"),
+ (0x118B3, "M", "𑣓"),
+ (0x118B4, "M", "𑣔"),
+ (0x118B5, "M", "𑣕"),
+ (0x118B6, "M", "𑣖"),
+ (0x118B7, "M", "𑣗"),
+ (0x118B8, "M", "𑣘"),
+ (0x118B9, "M", "𑣙"),
+ (0x118BA, "M", "𑣚"),
+ (0x118BB, "M", "𑣛"),
+ (0x118BC, "M", "𑣜"),
+ (0x118BD, "M", "𑣝"),
+ (0x118BE, "M", "𑣞"),
+ (0x118BF, "M", "𑣟"),
+ (0x118C0, "V"),
+ (0x118F3, "X"),
+ (0x118FF, "V"),
+ (0x11907, "X"),
+ (0x11909, "V"),
+ (0x1190A, "X"),
+ (0x1190C, "V"),
+ (0x11914, "X"),
+ (0x11915, "V"),
+ (0x11917, "X"),
+ (0x11918, "V"),
+ (0x11936, "X"),
+ (0x11937, "V"),
+ (0x11939, "X"),
+ (0x1193B, "V"),
+ (0x11947, "X"),
+ (0x11950, "V"),
+ (0x1195A, "X"),
+ (0x119A0, "V"),
+ (0x119A8, "X"),
+ (0x119AA, "V"),
+ (0x119D8, "X"),
+ (0x119DA, "V"),
+ (0x119E5, "X"),
+ (0x11A00, "V"),
+ (0x11A48, "X"),
+ (0x11A50, "V"),
+ (0x11AA3, "X"),
+ (0x11AB0, "V"),
+ (0x11AF9, "X"),
+ (0x11B00, "V"),
+ (0x11B0A, "X"),
+ (0x11BC0, "V"),
+ (0x11BE2, "X"),
+ (0x11BF0, "V"),
+ (0x11BFA, "X"),
+ (0x11C00, "V"),
+ (0x11C09, "X"),
+ (0x11C0A, "V"),
+ (0x11C37, "X"),
+ (0x11C38, "V"),
+ (0x11C46, "X"),
+ (0x11C50, "V"),
+ (0x11C6D, "X"),
+ (0x11C70, "V"),
+ (0x11C90, "X"),
+ (0x11C92, "V"),
+ (0x11CA8, "X"),
+ (0x11CA9, "V"),
+ (0x11CB7, "X"),
+ (0x11D00, "V"),
+ (0x11D07, "X"),
+ (0x11D08, "V"),
+ (0x11D0A, "X"),
+ (0x11D0B, "V"),
+ (0x11D37, "X"),
+ (0x11D3A, "V"),
+ (0x11D3B, "X"),
+ (0x11D3C, "V"),
+ (0x11D3E, "X"),
+ (0x11D3F, "V"),
+ (0x11D48, "X"),
+ (0x11D50, "V"),
+ (0x11D5A, "X"),
+ (0x11D60, "V"),
+ (0x11D66, "X"),
+ (0x11D67, "V"),
+ (0x11D69, "X"),
+ (0x11D6A, "V"),
+ (0x11D8F, "X"),
+ (0x11D90, "V"),
+ (0x11D92, "X"),
+ (0x11D93, "V"),
+ (0x11D99, "X"),
+ (0x11DA0, "V"),
+ (0x11DAA, "X"),
+ (0x11EE0, "V"),
+ (0x11EF9, "X"),
+ (0x11F00, "V"),
+ (0x11F11, "X"),
+ (0x11F12, "V"),
+ (0x11F3B, "X"),
+ (0x11F3E, "V"),
+ (0x11F5B, "X"),
+ ]
+
+
+def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x11FB0, "V"),
+ (0x11FB1, "X"),
+ (0x11FC0, "V"),
+ (0x11FF2, "X"),
+ (0x11FFF, "V"),
+ (0x1239A, "X"),
+ (0x12400, "V"),
+ (0x1246F, "X"),
+ (0x12470, "V"),
+ (0x12475, "X"),
+ (0x12480, "V"),
+ (0x12544, "X"),
+ (0x12F90, "V"),
+ (0x12FF3, "X"),
+ (0x13000, "V"),
+ (0x13430, "X"),
+ (0x13440, "V"),
+ (0x13456, "X"),
+ (0x13460, "V"),
+ (0x143FB, "X"),
+ (0x14400, "V"),
+ (0x14647, "X"),
+ (0x16100, "V"),
+ (0x1613A, "X"),
+ (0x16800, "V"),
+ (0x16A39, "X"),
+ (0x16A40, "V"),
+ (0x16A5F, "X"),
+ (0x16A60, "V"),
+ (0x16A6A, "X"),
+ (0x16A6E, "V"),
+ (0x16ABF, "X"),
+ (0x16AC0, "V"),
+ (0x16ACA, "X"),
+ (0x16AD0, "V"),
+ (0x16AEE, "X"),
+ (0x16AF0, "V"),
+ (0x16AF6, "X"),
+ (0x16B00, "V"),
+ (0x16B46, "X"),
+ (0x16B50, "V"),
+ (0x16B5A, "X"),
+ (0x16B5B, "V"),
+ (0x16B62, "X"),
+ (0x16B63, "V"),
+ (0x16B78, "X"),
+ (0x16B7D, "V"),
+ (0x16B90, "X"),
+ (0x16D40, "V"),
+ (0x16D7A, "X"),
+ (0x16E40, "M", "𖹠"),
+ (0x16E41, "M", "𖹡"),
+ (0x16E42, "M", "𖹢"),
+ (0x16E43, "M", "𖹣"),
+ (0x16E44, "M", "𖹤"),
+ (0x16E45, "M", "𖹥"),
+ (0x16E46, "M", "𖹦"),
+ (0x16E47, "M", "𖹧"),
+ (0x16E48, "M", "𖹨"),
+ (0x16E49, "M", "𖹩"),
+ (0x16E4A, "M", "𖹪"),
+ (0x16E4B, "M", "𖹫"),
+ (0x16E4C, "M", "𖹬"),
+ (0x16E4D, "M", "𖹭"),
+ (0x16E4E, "M", "𖹮"),
+ (0x16E4F, "M", "𖹯"),
+ (0x16E50, "M", "𖹰"),
+ (0x16E51, "M", "𖹱"),
+ (0x16E52, "M", "𖹲"),
+ (0x16E53, "M", "𖹳"),
+ (0x16E54, "M", "𖹴"),
+ (0x16E55, "M", "𖹵"),
+ (0x16E56, "M", "𖹶"),
+ (0x16E57, "M", "𖹷"),
+ (0x16E58, "M", "𖹸"),
+ (0x16E59, "M", "𖹹"),
+ (0x16E5A, "M", "𖹺"),
+ (0x16E5B, "M", "𖹻"),
+ (0x16E5C, "M", "𖹼"),
+ (0x16E5D, "M", "𖹽"),
+ (0x16E5E, "M", "𖹾"),
+ (0x16E5F, "M", "𖹿"),
+ (0x16E60, "V"),
+ (0x16E9B, "X"),
+ (0x16F00, "V"),
+ (0x16F4B, "X"),
+ (0x16F4F, "V"),
+ (0x16F88, "X"),
+ (0x16F8F, "V"),
+ (0x16FA0, "X"),
+ (0x16FE0, "V"),
+ (0x16FE5, "X"),
+ (0x16FF0, "V"),
+ (0x16FF2, "X"),
+ (0x17000, "V"),
+ (0x187F8, "X"),
+ (0x18800, "V"),
+ (0x18CD6, "X"),
+ (0x18CFF, "V"),
+ (0x18D09, "X"),
+ ]
+
+
+def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1AFF0, "V"),
+ (0x1AFF4, "X"),
+ (0x1AFF5, "V"),
+ (0x1AFFC, "X"),
+ (0x1AFFD, "V"),
+ (0x1AFFF, "X"),
+ (0x1B000, "V"),
+ (0x1B123, "X"),
+ (0x1B132, "V"),
+ (0x1B133, "X"),
+ (0x1B150, "V"),
+ (0x1B153, "X"),
+ (0x1B155, "V"),
+ (0x1B156, "X"),
+ (0x1B164, "V"),
+ (0x1B168, "X"),
+ (0x1B170, "V"),
+ (0x1B2FC, "X"),
+ (0x1BC00, "V"),
+ (0x1BC6B, "X"),
+ (0x1BC70, "V"),
+ (0x1BC7D, "X"),
+ (0x1BC80, "V"),
+ (0x1BC89, "X"),
+ (0x1BC90, "V"),
+ (0x1BC9A, "X"),
+ (0x1BC9C, "V"),
+ (0x1BCA0, "I"),
+ (0x1BCA4, "X"),
+ (0x1CC00, "V"),
+ (0x1CCD6, "M", "a"),
+ (0x1CCD7, "M", "b"),
+ (0x1CCD8, "M", "c"),
+ (0x1CCD9, "M", "d"),
+ (0x1CCDA, "M", "e"),
+ (0x1CCDB, "M", "f"),
+ (0x1CCDC, "M", "g"),
+ (0x1CCDD, "M", "h"),
+ (0x1CCDE, "M", "i"),
+ (0x1CCDF, "M", "j"),
+ (0x1CCE0, "M", "k"),
+ (0x1CCE1, "M", "l"),
+ (0x1CCE2, "M", "m"),
+ (0x1CCE3, "M", "n"),
+ (0x1CCE4, "M", "o"),
+ (0x1CCE5, "M", "p"),
+ (0x1CCE6, "M", "q"),
+ (0x1CCE7, "M", "r"),
+ (0x1CCE8, "M", "s"),
+ (0x1CCE9, "M", "t"),
+ (0x1CCEA, "M", "u"),
+ (0x1CCEB, "M", "v"),
+ (0x1CCEC, "M", "w"),
+ (0x1CCED, "M", "x"),
+ (0x1CCEE, "M", "y"),
+ (0x1CCEF, "M", "z"),
+ (0x1CCF0, "M", "0"),
+ (0x1CCF1, "M", "1"),
+ (0x1CCF2, "M", "2"),
+ (0x1CCF3, "M", "3"),
+ (0x1CCF4, "M", "4"),
+ (0x1CCF5, "M", "5"),
+ (0x1CCF6, "M", "6"),
+ (0x1CCF7, "M", "7"),
+ (0x1CCF8, "M", "8"),
+ (0x1CCF9, "M", "9"),
+ (0x1CCFA, "X"),
+ (0x1CD00, "V"),
+ (0x1CEB4, "X"),
+ (0x1CF00, "V"),
+ (0x1CF2E, "X"),
+ (0x1CF30, "V"),
+ (0x1CF47, "X"),
+ (0x1CF50, "V"),
+ (0x1CFC4, "X"),
+ (0x1D000, "V"),
+ (0x1D0F6, "X"),
+ (0x1D100, "V"),
+ (0x1D127, "X"),
+ (0x1D129, "V"),
+ (0x1D15E, "M", "𝅗𝅥"),
+ (0x1D15F, "M", "𝅘𝅥"),
+ (0x1D160, "M", "𝅘𝅥𝅮"),
+ (0x1D161, "M", "𝅘𝅥𝅯"),
+ (0x1D162, "M", "𝅘𝅥𝅰"),
+ (0x1D163, "M", "𝅘𝅥𝅱"),
+ (0x1D164, "M", "𝅘𝅥𝅲"),
+ (0x1D165, "V"),
+ (0x1D173, "I"),
+ (0x1D17B, "V"),
+ (0x1D1BB, "M", "𝆹𝅥"),
+ (0x1D1BC, "M", "𝆺𝅥"),
+ (0x1D1BD, "M", "𝆹𝅥𝅮"),
+ (0x1D1BE, "M", "𝆺𝅥𝅮"),
+ (0x1D1BF, "M", "𝆹𝅥𝅯"),
+ (0x1D1C0, "M", "𝆺𝅥𝅯"),
+ (0x1D1C1, "V"),
+ (0x1D1EB, "X"),
+ (0x1D200, "V"),
+ (0x1D246, "X"),
+ ]
+
+
+def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D2C0, "V"),
+ (0x1D2D4, "X"),
+ (0x1D2E0, "V"),
+ (0x1D2F4, "X"),
+ (0x1D300, "V"),
+ (0x1D357, "X"),
+ (0x1D360, "V"),
+ (0x1D379, "X"),
+ (0x1D400, "M", "a"),
+ (0x1D401, "M", "b"),
+ (0x1D402, "M", "c"),
+ (0x1D403, "M", "d"),
+ (0x1D404, "M", "e"),
+ (0x1D405, "M", "f"),
+ (0x1D406, "M", "g"),
+ (0x1D407, "M", "h"),
+ (0x1D408, "M", "i"),
+ (0x1D409, "M", "j"),
+ (0x1D40A, "M", "k"),
+ (0x1D40B, "M", "l"),
+ (0x1D40C, "M", "m"),
+ (0x1D40D, "M", "n"),
+ (0x1D40E, "M", "o"),
+ (0x1D40F, "M", "p"),
+ (0x1D410, "M", "q"),
+ (0x1D411, "M", "r"),
+ (0x1D412, "M", "s"),
+ (0x1D413, "M", "t"),
+ (0x1D414, "M", "u"),
+ (0x1D415, "M", "v"),
+ (0x1D416, "M", "w"),
+ (0x1D417, "M", "x"),
+ (0x1D418, "M", "y"),
+ (0x1D419, "M", "z"),
+ (0x1D41A, "M", "a"),
+ (0x1D41B, "M", "b"),
+ (0x1D41C, "M", "c"),
+ (0x1D41D, "M", "d"),
+ (0x1D41E, "M", "e"),
+ (0x1D41F, "M", "f"),
+ (0x1D420, "M", "g"),
+ (0x1D421, "M", "h"),
+ (0x1D422, "M", "i"),
+ (0x1D423, "M", "j"),
+ (0x1D424, "M", "k"),
+ (0x1D425, "M", "l"),
+ (0x1D426, "M", "m"),
+ (0x1D427, "M", "n"),
+ (0x1D428, "M", "o"),
+ (0x1D429, "M", "p"),
+ (0x1D42A, "M", "q"),
+ (0x1D42B, "M", "r"),
+ (0x1D42C, "M", "s"),
+ (0x1D42D, "M", "t"),
+ (0x1D42E, "M", "u"),
+ (0x1D42F, "M", "v"),
+ (0x1D430, "M", "w"),
+ (0x1D431, "M", "x"),
+ (0x1D432, "M", "y"),
+ (0x1D433, "M", "z"),
+ (0x1D434, "M", "a"),
+ (0x1D435, "M", "b"),
+ (0x1D436, "M", "c"),
+ (0x1D437, "M", "d"),
+ (0x1D438, "M", "e"),
+ (0x1D439, "M", "f"),
+ (0x1D43A, "M", "g"),
+ (0x1D43B, "M", "h"),
+ (0x1D43C, "M", "i"),
+ (0x1D43D, "M", "j"),
+ (0x1D43E, "M", "k"),
+ (0x1D43F, "M", "l"),
+ (0x1D440, "M", "m"),
+ (0x1D441, "M", "n"),
+ (0x1D442, "M", "o"),
+ (0x1D443, "M", "p"),
+ (0x1D444, "M", "q"),
+ (0x1D445, "M", "r"),
+ (0x1D446, "M", "s"),
+ (0x1D447, "M", "t"),
+ (0x1D448, "M", "u"),
+ (0x1D449, "M", "v"),
+ (0x1D44A, "M", "w"),
+ (0x1D44B, "M", "x"),
+ (0x1D44C, "M", "y"),
+ (0x1D44D, "M", "z"),
+ (0x1D44E, "M", "a"),
+ (0x1D44F, "M", "b"),
+ (0x1D450, "M", "c"),
+ (0x1D451, "M", "d"),
+ (0x1D452, "M", "e"),
+ (0x1D453, "M", "f"),
+ (0x1D454, "M", "g"),
+ (0x1D455, "X"),
+ (0x1D456, "M", "i"),
+ (0x1D457, "M", "j"),
+ (0x1D458, "M", "k"),
+ (0x1D459, "M", "l"),
+ (0x1D45A, "M", "m"),
+ (0x1D45B, "M", "n"),
+ ]
+
+
+def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D45C, "M", "o"),
+ (0x1D45D, "M", "p"),
+ (0x1D45E, "M", "q"),
+ (0x1D45F, "M", "r"),
+ (0x1D460, "M", "s"),
+ (0x1D461, "M", "t"),
+ (0x1D462, "M", "u"),
+ (0x1D463, "M", "v"),
+ (0x1D464, "M", "w"),
+ (0x1D465, "M", "x"),
+ (0x1D466, "M", "y"),
+ (0x1D467, "M", "z"),
+ (0x1D468, "M", "a"),
+ (0x1D469, "M", "b"),
+ (0x1D46A, "M", "c"),
+ (0x1D46B, "M", "d"),
+ (0x1D46C, "M", "e"),
+ (0x1D46D, "M", "f"),
+ (0x1D46E, "M", "g"),
+ (0x1D46F, "M", "h"),
+ (0x1D470, "M", "i"),
+ (0x1D471, "M", "j"),
+ (0x1D472, "M", "k"),
+ (0x1D473, "M", "l"),
+ (0x1D474, "M", "m"),
+ (0x1D475, "M", "n"),
+ (0x1D476, "M", "o"),
+ (0x1D477, "M", "p"),
+ (0x1D478, "M", "q"),
+ (0x1D479, "M", "r"),
+ (0x1D47A, "M", "s"),
+ (0x1D47B, "M", "t"),
+ (0x1D47C, "M", "u"),
+ (0x1D47D, "M", "v"),
+ (0x1D47E, "M", "w"),
+ (0x1D47F, "M", "x"),
+ (0x1D480, "M", "y"),
+ (0x1D481, "M", "z"),
+ (0x1D482, "M", "a"),
+ (0x1D483, "M", "b"),
+ (0x1D484, "M", "c"),
+ (0x1D485, "M", "d"),
+ (0x1D486, "M", "e"),
+ (0x1D487, "M", "f"),
+ (0x1D488, "M", "g"),
+ (0x1D489, "M", "h"),
+ (0x1D48A, "M", "i"),
+ (0x1D48B, "M", "j"),
+ (0x1D48C, "M", "k"),
+ (0x1D48D, "M", "l"),
+ (0x1D48E, "M", "m"),
+ (0x1D48F, "M", "n"),
+ (0x1D490, "M", "o"),
+ (0x1D491, "M", "p"),
+ (0x1D492, "M", "q"),
+ (0x1D493, "M", "r"),
+ (0x1D494, "M", "s"),
+ (0x1D495, "M", "t"),
+ (0x1D496, "M", "u"),
+ (0x1D497, "M", "v"),
+ (0x1D498, "M", "w"),
+ (0x1D499, "M", "x"),
+ (0x1D49A, "M", "y"),
+ (0x1D49B, "M", "z"),
+ (0x1D49C, "M", "a"),
+ (0x1D49D, "X"),
+ (0x1D49E, "M", "c"),
+ (0x1D49F, "M", "d"),
+ (0x1D4A0, "X"),
+ (0x1D4A2, "M", "g"),
+ (0x1D4A3, "X"),
+ (0x1D4A5, "M", "j"),
+ (0x1D4A6, "M", "k"),
+ (0x1D4A7, "X"),
+ (0x1D4A9, "M", "n"),
+ (0x1D4AA, "M", "o"),
+ (0x1D4AB, "M", "p"),
+ (0x1D4AC, "M", "q"),
+ (0x1D4AD, "X"),
+ (0x1D4AE, "M", "s"),
+ (0x1D4AF, "M", "t"),
+ (0x1D4B0, "M", "u"),
+ (0x1D4B1, "M", "v"),
+ (0x1D4B2, "M", "w"),
+ (0x1D4B3, "M", "x"),
+ (0x1D4B4, "M", "y"),
+ (0x1D4B5, "M", "z"),
+ (0x1D4B6, "M", "a"),
+ (0x1D4B7, "M", "b"),
+ (0x1D4B8, "M", "c"),
+ (0x1D4B9, "M", "d"),
+ (0x1D4BA, "X"),
+ (0x1D4BB, "M", "f"),
+ (0x1D4BC, "X"),
+ (0x1D4BD, "M", "h"),
+ (0x1D4BE, "M", "i"),
+ (0x1D4BF, "M", "j"),
+ (0x1D4C0, "M", "k"),
+ (0x1D4C1, "M", "l"),
+ (0x1D4C2, "M", "m"),
+ ]
+
+
+def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D4C3, "M", "n"),
+ (0x1D4C4, "X"),
+ (0x1D4C5, "M", "p"),
+ (0x1D4C6, "M", "q"),
+ (0x1D4C7, "M", "r"),
+ (0x1D4C8, "M", "s"),
+ (0x1D4C9, "M", "t"),
+ (0x1D4CA, "M", "u"),
+ (0x1D4CB, "M", "v"),
+ (0x1D4CC, "M", "w"),
+ (0x1D4CD, "M", "x"),
+ (0x1D4CE, "M", "y"),
+ (0x1D4CF, "M", "z"),
+ (0x1D4D0, "M", "a"),
+ (0x1D4D1, "M", "b"),
+ (0x1D4D2, "M", "c"),
+ (0x1D4D3, "M", "d"),
+ (0x1D4D4, "M", "e"),
+ (0x1D4D5, "M", "f"),
+ (0x1D4D6, "M", "g"),
+ (0x1D4D7, "M", "h"),
+ (0x1D4D8, "M", "i"),
+ (0x1D4D9, "M", "j"),
+ (0x1D4DA, "M", "k"),
+ (0x1D4DB, "M", "l"),
+ (0x1D4DC, "M", "m"),
+ (0x1D4DD, "M", "n"),
+ (0x1D4DE, "M", "o"),
+ (0x1D4DF, "M", "p"),
+ (0x1D4E0, "M", "q"),
+ (0x1D4E1, "M", "r"),
+ (0x1D4E2, "M", "s"),
+ (0x1D4E3, "M", "t"),
+ (0x1D4E4, "M", "u"),
+ (0x1D4E5, "M", "v"),
+ (0x1D4E6, "M", "w"),
+ (0x1D4E7, "M", "x"),
+ (0x1D4E8, "M", "y"),
+ (0x1D4E9, "M", "z"),
+ (0x1D4EA, "M", "a"),
+ (0x1D4EB, "M", "b"),
+ (0x1D4EC, "M", "c"),
+ (0x1D4ED, "M", "d"),
+ (0x1D4EE, "M", "e"),
+ (0x1D4EF, "M", "f"),
+ (0x1D4F0, "M", "g"),
+ (0x1D4F1, "M", "h"),
+ (0x1D4F2, "M", "i"),
+ (0x1D4F3, "M", "j"),
+ (0x1D4F4, "M", "k"),
+ (0x1D4F5, "M", "l"),
+ (0x1D4F6, "M", "m"),
+ (0x1D4F7, "M", "n"),
+ (0x1D4F8, "M", "o"),
+ (0x1D4F9, "M", "p"),
+ (0x1D4FA, "M", "q"),
+ (0x1D4FB, "M", "r"),
+ (0x1D4FC, "M", "s"),
+ (0x1D4FD, "M", "t"),
+ (0x1D4FE, "M", "u"),
+ (0x1D4FF, "M", "v"),
+ (0x1D500, "M", "w"),
+ (0x1D501, "M", "x"),
+ (0x1D502, "M", "y"),
+ (0x1D503, "M", "z"),
+ (0x1D504, "M", "a"),
+ (0x1D505, "M", "b"),
+ (0x1D506, "X"),
+ (0x1D507, "M", "d"),
+ (0x1D508, "M", "e"),
+ (0x1D509, "M", "f"),
+ (0x1D50A, "M", "g"),
+ (0x1D50B, "X"),
+ (0x1D50D, "M", "j"),
+ (0x1D50E, "M", "k"),
+ (0x1D50F, "M", "l"),
+ (0x1D510, "M", "m"),
+ (0x1D511, "M", "n"),
+ (0x1D512, "M", "o"),
+ (0x1D513, "M", "p"),
+ (0x1D514, "M", "q"),
+ (0x1D515, "X"),
+ (0x1D516, "M", "s"),
+ (0x1D517, "M", "t"),
+ (0x1D518, "M", "u"),
+ (0x1D519, "M", "v"),
+ (0x1D51A, "M", "w"),
+ (0x1D51B, "M", "x"),
+ (0x1D51C, "M", "y"),
+ (0x1D51D, "X"),
+ (0x1D51E, "M", "a"),
+ (0x1D51F, "M", "b"),
+ (0x1D520, "M", "c"),
+ (0x1D521, "M", "d"),
+ (0x1D522, "M", "e"),
+ (0x1D523, "M", "f"),
+ (0x1D524, "M", "g"),
+ (0x1D525, "M", "h"),
+ (0x1D526, "M", "i"),
+ (0x1D527, "M", "j"),
+ ]
+
+
+def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D528, "M", "k"),
+ (0x1D529, "M", "l"),
+ (0x1D52A, "M", "m"),
+ (0x1D52B, "M", "n"),
+ (0x1D52C, "M", "o"),
+ (0x1D52D, "M", "p"),
+ (0x1D52E, "M", "q"),
+ (0x1D52F, "M", "r"),
+ (0x1D530, "M", "s"),
+ (0x1D531, "M", "t"),
+ (0x1D532, "M", "u"),
+ (0x1D533, "M", "v"),
+ (0x1D534, "M", "w"),
+ (0x1D535, "M", "x"),
+ (0x1D536, "M", "y"),
+ (0x1D537, "M", "z"),
+ (0x1D538, "M", "a"),
+ (0x1D539, "M", "b"),
+ (0x1D53A, "X"),
+ (0x1D53B, "M", "d"),
+ (0x1D53C, "M", "e"),
+ (0x1D53D, "M", "f"),
+ (0x1D53E, "M", "g"),
+ (0x1D53F, "X"),
+ (0x1D540, "M", "i"),
+ (0x1D541, "M", "j"),
+ (0x1D542, "M", "k"),
+ (0x1D543, "M", "l"),
+ (0x1D544, "M", "m"),
+ (0x1D545, "X"),
+ (0x1D546, "M", "o"),
+ (0x1D547, "X"),
+ (0x1D54A, "M", "s"),
+ (0x1D54B, "M", "t"),
+ (0x1D54C, "M", "u"),
+ (0x1D54D, "M", "v"),
+ (0x1D54E, "M", "w"),
+ (0x1D54F, "M", "x"),
+ (0x1D550, "M", "y"),
+ (0x1D551, "X"),
+ (0x1D552, "M", "a"),
+ (0x1D553, "M", "b"),
+ (0x1D554, "M", "c"),
+ (0x1D555, "M", "d"),
+ (0x1D556, "M", "e"),
+ (0x1D557, "M", "f"),
+ (0x1D558, "M", "g"),
+ (0x1D559, "M", "h"),
+ (0x1D55A, "M", "i"),
+ (0x1D55B, "M", "j"),
+ (0x1D55C, "M", "k"),
+ (0x1D55D, "M", "l"),
+ (0x1D55E, "M", "m"),
+ (0x1D55F, "M", "n"),
+ (0x1D560, "M", "o"),
+ (0x1D561, "M", "p"),
+ (0x1D562, "M", "q"),
+ (0x1D563, "M", "r"),
+ (0x1D564, "M", "s"),
+ (0x1D565, "M", "t"),
+ (0x1D566, "M", "u"),
+ (0x1D567, "M", "v"),
+ (0x1D568, "M", "w"),
+ (0x1D569, "M", "x"),
+ (0x1D56A, "M", "y"),
+ (0x1D56B, "M", "z"),
+ (0x1D56C, "M", "a"),
+ (0x1D56D, "M", "b"),
+ (0x1D56E, "M", "c"),
+ (0x1D56F, "M", "d"),
+ (0x1D570, "M", "e"),
+ (0x1D571, "M", "f"),
+ (0x1D572, "M", "g"),
+ (0x1D573, "M", "h"),
+ (0x1D574, "M", "i"),
+ (0x1D575, "M", "j"),
+ (0x1D576, "M", "k"),
+ (0x1D577, "M", "l"),
+ (0x1D578, "M", "m"),
+ (0x1D579, "M", "n"),
+ (0x1D57A, "M", "o"),
+ (0x1D57B, "M", "p"),
+ (0x1D57C, "M", "q"),
+ (0x1D57D, "M", "r"),
+ (0x1D57E, "M", "s"),
+ (0x1D57F, "M", "t"),
+ (0x1D580, "M", "u"),
+ (0x1D581, "M", "v"),
+ (0x1D582, "M", "w"),
+ (0x1D583, "M", "x"),
+ (0x1D584, "M", "y"),
+ (0x1D585, "M", "z"),
+ (0x1D586, "M", "a"),
+ (0x1D587, "M", "b"),
+ (0x1D588, "M", "c"),
+ (0x1D589, "M", "d"),
+ (0x1D58A, "M", "e"),
+ (0x1D58B, "M", "f"),
+ (0x1D58C, "M", "g"),
+ (0x1D58D, "M", "h"),
+ ]
+
+
+def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D58E, "M", "i"),
+ (0x1D58F, "M", "j"),
+ (0x1D590, "M", "k"),
+ (0x1D591, "M", "l"),
+ (0x1D592, "M", "m"),
+ (0x1D593, "M", "n"),
+ (0x1D594, "M", "o"),
+ (0x1D595, "M", "p"),
+ (0x1D596, "M", "q"),
+ (0x1D597, "M", "r"),
+ (0x1D598, "M", "s"),
+ (0x1D599, "M", "t"),
+ (0x1D59A, "M", "u"),
+ (0x1D59B, "M", "v"),
+ (0x1D59C, "M", "w"),
+ (0x1D59D, "M", "x"),
+ (0x1D59E, "M", "y"),
+ (0x1D59F, "M", "z"),
+ (0x1D5A0, "M", "a"),
+ (0x1D5A1, "M", "b"),
+ (0x1D5A2, "M", "c"),
+ (0x1D5A3, "M", "d"),
+ (0x1D5A4, "M", "e"),
+ (0x1D5A5, "M", "f"),
+ (0x1D5A6, "M", "g"),
+ (0x1D5A7, "M", "h"),
+ (0x1D5A8, "M", "i"),
+ (0x1D5A9, "M", "j"),
+ (0x1D5AA, "M", "k"),
+ (0x1D5AB, "M", "l"),
+ (0x1D5AC, "M", "m"),
+ (0x1D5AD, "M", "n"),
+ (0x1D5AE, "M", "o"),
+ (0x1D5AF, "M", "p"),
+ (0x1D5B0, "M", "q"),
+ (0x1D5B1, "M", "r"),
+ (0x1D5B2, "M", "s"),
+ (0x1D5B3, "M", "t"),
+ (0x1D5B4, "M", "u"),
+ (0x1D5B5, "M", "v"),
+ (0x1D5B6, "M", "w"),
+ (0x1D5B7, "M", "x"),
+ (0x1D5B8, "M", "y"),
+ (0x1D5B9, "M", "z"),
+ (0x1D5BA, "M", "a"),
+ (0x1D5BB, "M", "b"),
+ (0x1D5BC, "M", "c"),
+ (0x1D5BD, "M", "d"),
+ (0x1D5BE, "M", "e"),
+ (0x1D5BF, "M", "f"),
+ (0x1D5C0, "M", "g"),
+ (0x1D5C1, "M", "h"),
+ (0x1D5C2, "M", "i"),
+ (0x1D5C3, "M", "j"),
+ (0x1D5C4, "M", "k"),
+ (0x1D5C5, "M", "l"),
+ (0x1D5C6, "M", "m"),
+ (0x1D5C7, "M", "n"),
+ (0x1D5C8, "M", "o"),
+ (0x1D5C9, "M", "p"),
+ (0x1D5CA, "M", "q"),
+ (0x1D5CB, "M", "r"),
+ (0x1D5CC, "M", "s"),
+ (0x1D5CD, "M", "t"),
+ (0x1D5CE, "M", "u"),
+ (0x1D5CF, "M", "v"),
+ (0x1D5D0, "M", "w"),
+ (0x1D5D1, "M", "x"),
+ (0x1D5D2, "M", "y"),
+ (0x1D5D3, "M", "z"),
+ (0x1D5D4, "M", "a"),
+ (0x1D5D5, "M", "b"),
+ (0x1D5D6, "M", "c"),
+ (0x1D5D7, "M", "d"),
+ (0x1D5D8, "M", "e"),
+ (0x1D5D9, "M", "f"),
+ (0x1D5DA, "M", "g"),
+ (0x1D5DB, "M", "h"),
+ (0x1D5DC, "M", "i"),
+ (0x1D5DD, "M", "j"),
+ (0x1D5DE, "M", "k"),
+ (0x1D5DF, "M", "l"),
+ (0x1D5E0, "M", "m"),
+ (0x1D5E1, "M", "n"),
+ (0x1D5E2, "M", "o"),
+ (0x1D5E3, "M", "p"),
+ (0x1D5E4, "M", "q"),
+ (0x1D5E5, "M", "r"),
+ (0x1D5E6, "M", "s"),
+ (0x1D5E7, "M", "t"),
+ (0x1D5E8, "M", "u"),
+ (0x1D5E9, "M", "v"),
+ (0x1D5EA, "M", "w"),
+ (0x1D5EB, "M", "x"),
+ (0x1D5EC, "M", "y"),
+ (0x1D5ED, "M", "z"),
+ (0x1D5EE, "M", "a"),
+ (0x1D5EF, "M", "b"),
+ (0x1D5F0, "M", "c"),
+ (0x1D5F1, "M", "d"),
+ ]
+
+
+def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D5F2, "M", "e"),
+ (0x1D5F3, "M", "f"),
+ (0x1D5F4, "M", "g"),
+ (0x1D5F5, "M", "h"),
+ (0x1D5F6, "M", "i"),
+ (0x1D5F7, "M", "j"),
+ (0x1D5F8, "M", "k"),
+ (0x1D5F9, "M", "l"),
+ (0x1D5FA, "M", "m"),
+ (0x1D5FB, "M", "n"),
+ (0x1D5FC, "M", "o"),
+ (0x1D5FD, "M", "p"),
+ (0x1D5FE, "M", "q"),
+ (0x1D5FF, "M", "r"),
+ (0x1D600, "M", "s"),
+ (0x1D601, "M", "t"),
+ (0x1D602, "M", "u"),
+ (0x1D603, "M", "v"),
+ (0x1D604, "M", "w"),
+ (0x1D605, "M", "x"),
+ (0x1D606, "M", "y"),
+ (0x1D607, "M", "z"),
+ (0x1D608, "M", "a"),
+ (0x1D609, "M", "b"),
+ (0x1D60A, "M", "c"),
+ (0x1D60B, "M", "d"),
+ (0x1D60C, "M", "e"),
+ (0x1D60D, "M", "f"),
+ (0x1D60E, "M", "g"),
+ (0x1D60F, "M", "h"),
+ (0x1D610, "M", "i"),
+ (0x1D611, "M", "j"),
+ (0x1D612, "M", "k"),
+ (0x1D613, "M", "l"),
+ (0x1D614, "M", "m"),
+ (0x1D615, "M", "n"),
+ (0x1D616, "M", "o"),
+ (0x1D617, "M", "p"),
+ (0x1D618, "M", "q"),
+ (0x1D619, "M", "r"),
+ (0x1D61A, "M", "s"),
+ (0x1D61B, "M", "t"),
+ (0x1D61C, "M", "u"),
+ (0x1D61D, "M", "v"),
+ (0x1D61E, "M", "w"),
+ (0x1D61F, "M", "x"),
+ (0x1D620, "M", "y"),
+ (0x1D621, "M", "z"),
+ (0x1D622, "M", "a"),
+ (0x1D623, "M", "b"),
+ (0x1D624, "M", "c"),
+ (0x1D625, "M", "d"),
+ (0x1D626, "M", "e"),
+ (0x1D627, "M", "f"),
+ (0x1D628, "M", "g"),
+ (0x1D629, "M", "h"),
+ (0x1D62A, "M", "i"),
+ (0x1D62B, "M", "j"),
+ (0x1D62C, "M", "k"),
+ (0x1D62D, "M", "l"),
+ (0x1D62E, "M", "m"),
+ (0x1D62F, "M", "n"),
+ (0x1D630, "M", "o"),
+ (0x1D631, "M", "p"),
+ (0x1D632, "M", "q"),
+ (0x1D633, "M", "r"),
+ (0x1D634, "M", "s"),
+ (0x1D635, "M", "t"),
+ (0x1D636, "M", "u"),
+ (0x1D637, "M", "v"),
+ (0x1D638, "M", "w"),
+ (0x1D639, "M", "x"),
+ (0x1D63A, "M", "y"),
+ (0x1D63B, "M", "z"),
+ (0x1D63C, "M", "a"),
+ (0x1D63D, "M", "b"),
+ (0x1D63E, "M", "c"),
+ (0x1D63F, "M", "d"),
+ (0x1D640, "M", "e"),
+ (0x1D641, "M", "f"),
+ (0x1D642, "M", "g"),
+ (0x1D643, "M", "h"),
+ (0x1D644, "M", "i"),
+ (0x1D645, "M", "j"),
+ (0x1D646, "M", "k"),
+ (0x1D647, "M", "l"),
+ (0x1D648, "M", "m"),
+ (0x1D649, "M", "n"),
+ (0x1D64A, "M", "o"),
+ (0x1D64B, "M", "p"),
+ (0x1D64C, "M", "q"),
+ (0x1D64D, "M", "r"),
+ (0x1D64E, "M", "s"),
+ (0x1D64F, "M", "t"),
+ (0x1D650, "M", "u"),
+ (0x1D651, "M", "v"),
+ (0x1D652, "M", "w"),
+ (0x1D653, "M", "x"),
+ (0x1D654, "M", "y"),
+ (0x1D655, "M", "z"),
+ ]
+
+
+def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D656, "M", "a"),
+ (0x1D657, "M", "b"),
+ (0x1D658, "M", "c"),
+ (0x1D659, "M", "d"),
+ (0x1D65A, "M", "e"),
+ (0x1D65B, "M", "f"),
+ (0x1D65C, "M", "g"),
+ (0x1D65D, "M", "h"),
+ (0x1D65E, "M", "i"),
+ (0x1D65F, "M", "j"),
+ (0x1D660, "M", "k"),
+ (0x1D661, "M", "l"),
+ (0x1D662, "M", "m"),
+ (0x1D663, "M", "n"),
+ (0x1D664, "M", "o"),
+ (0x1D665, "M", "p"),
+ (0x1D666, "M", "q"),
+ (0x1D667, "M", "r"),
+ (0x1D668, "M", "s"),
+ (0x1D669, "M", "t"),
+ (0x1D66A, "M", "u"),
+ (0x1D66B, "M", "v"),
+ (0x1D66C, "M", "w"),
+ (0x1D66D, "M", "x"),
+ (0x1D66E, "M", "y"),
+ (0x1D66F, "M", "z"),
+ (0x1D670, "M", "a"),
+ (0x1D671, "M", "b"),
+ (0x1D672, "M", "c"),
+ (0x1D673, "M", "d"),
+ (0x1D674, "M", "e"),
+ (0x1D675, "M", "f"),
+ (0x1D676, "M", "g"),
+ (0x1D677, "M", "h"),
+ (0x1D678, "M", "i"),
+ (0x1D679, "M", "j"),
+ (0x1D67A, "M", "k"),
+ (0x1D67B, "M", "l"),
+ (0x1D67C, "M", "m"),
+ (0x1D67D, "M", "n"),
+ (0x1D67E, "M", "o"),
+ (0x1D67F, "M", "p"),
+ (0x1D680, "M", "q"),
+ (0x1D681, "M", "r"),
+ (0x1D682, "M", "s"),
+ (0x1D683, "M", "t"),
+ (0x1D684, "M", "u"),
+ (0x1D685, "M", "v"),
+ (0x1D686, "M", "w"),
+ (0x1D687, "M", "x"),
+ (0x1D688, "M", "y"),
+ (0x1D689, "M", "z"),
+ (0x1D68A, "M", "a"),
+ (0x1D68B, "M", "b"),
+ (0x1D68C, "M", "c"),
+ (0x1D68D, "M", "d"),
+ (0x1D68E, "M", "e"),
+ (0x1D68F, "M", "f"),
+ (0x1D690, "M", "g"),
+ (0x1D691, "M", "h"),
+ (0x1D692, "M", "i"),
+ (0x1D693, "M", "j"),
+ (0x1D694, "M", "k"),
+ (0x1D695, "M", "l"),
+ (0x1D696, "M", "m"),
+ (0x1D697, "M", "n"),
+ (0x1D698, "M", "o"),
+ (0x1D699, "M", "p"),
+ (0x1D69A, "M", "q"),
+ (0x1D69B, "M", "r"),
+ (0x1D69C, "M", "s"),
+ (0x1D69D, "M", "t"),
+ (0x1D69E, "M", "u"),
+ (0x1D69F, "M", "v"),
+ (0x1D6A0, "M", "w"),
+ (0x1D6A1, "M", "x"),
+ (0x1D6A2, "M", "y"),
+ (0x1D6A3, "M", "z"),
+ (0x1D6A4, "M", "ı"),
+ (0x1D6A5, "M", "ȷ"),
+ (0x1D6A6, "X"),
+ (0x1D6A8, "M", "α"),
+ (0x1D6A9, "M", "β"),
+ (0x1D6AA, "M", "γ"),
+ (0x1D6AB, "M", "δ"),
+ (0x1D6AC, "M", "ε"),
+ (0x1D6AD, "M", "ζ"),
+ (0x1D6AE, "M", "η"),
+ (0x1D6AF, "M", "θ"),
+ (0x1D6B0, "M", "ι"),
+ (0x1D6B1, "M", "κ"),
+ (0x1D6B2, "M", "λ"),
+ (0x1D6B3, "M", "μ"),
+ (0x1D6B4, "M", "ν"),
+ (0x1D6B5, "M", "ξ"),
+ (0x1D6B6, "M", "ο"),
+ (0x1D6B7, "M", "π"),
+ (0x1D6B8, "M", "ρ"),
+ (0x1D6B9, "M", "θ"),
+ (0x1D6BA, "M", "σ"),
+ ]
+
+
+def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D6BB, "M", "τ"),
+ (0x1D6BC, "M", "υ"),
+ (0x1D6BD, "M", "φ"),
+ (0x1D6BE, "M", "χ"),
+ (0x1D6BF, "M", "ψ"),
+ (0x1D6C0, "M", "ω"),
+ (0x1D6C1, "M", "∇"),
+ (0x1D6C2, "M", "α"),
+ (0x1D6C3, "M", "β"),
+ (0x1D6C4, "M", "γ"),
+ (0x1D6C5, "M", "δ"),
+ (0x1D6C6, "M", "ε"),
+ (0x1D6C7, "M", "ζ"),
+ (0x1D6C8, "M", "η"),
+ (0x1D6C9, "M", "θ"),
+ (0x1D6CA, "M", "ι"),
+ (0x1D6CB, "M", "κ"),
+ (0x1D6CC, "M", "λ"),
+ (0x1D6CD, "M", "μ"),
+ (0x1D6CE, "M", "ν"),
+ (0x1D6CF, "M", "ξ"),
+ (0x1D6D0, "M", "ο"),
+ (0x1D6D1, "M", "π"),
+ (0x1D6D2, "M", "ρ"),
+ (0x1D6D3, "M", "σ"),
+ (0x1D6D5, "M", "τ"),
+ (0x1D6D6, "M", "υ"),
+ (0x1D6D7, "M", "φ"),
+ (0x1D6D8, "M", "χ"),
+ (0x1D6D9, "M", "ψ"),
+ (0x1D6DA, "M", "ω"),
+ (0x1D6DB, "M", "∂"),
+ (0x1D6DC, "M", "ε"),
+ (0x1D6DD, "M", "θ"),
+ (0x1D6DE, "M", "κ"),
+ (0x1D6DF, "M", "φ"),
+ (0x1D6E0, "M", "ρ"),
+ (0x1D6E1, "M", "π"),
+ (0x1D6E2, "M", "α"),
+ (0x1D6E3, "M", "β"),
+ (0x1D6E4, "M", "γ"),
+ (0x1D6E5, "M", "δ"),
+ (0x1D6E6, "M", "ε"),
+ (0x1D6E7, "M", "ζ"),
+ (0x1D6E8, "M", "η"),
+ (0x1D6E9, "M", "θ"),
+ (0x1D6EA, "M", "ι"),
+ (0x1D6EB, "M", "κ"),
+ (0x1D6EC, "M", "λ"),
+ (0x1D6ED, "M", "μ"),
+ (0x1D6EE, "M", "ν"),
+ (0x1D6EF, "M", "ξ"),
+ (0x1D6F0, "M", "ο"),
+ (0x1D6F1, "M", "π"),
+ (0x1D6F2, "M", "ρ"),
+ (0x1D6F3, "M", "θ"),
+ (0x1D6F4, "M", "σ"),
+ (0x1D6F5, "M", "τ"),
+ (0x1D6F6, "M", "υ"),
+ (0x1D6F7, "M", "φ"),
+ (0x1D6F8, "M", "χ"),
+ (0x1D6F9, "M", "ψ"),
+ (0x1D6FA, "M", "ω"),
+ (0x1D6FB, "M", "∇"),
+ (0x1D6FC, "M", "α"),
+ (0x1D6FD, "M", "β"),
+ (0x1D6FE, "M", "γ"),
+ (0x1D6FF, "M", "δ"),
+ (0x1D700, "M", "ε"),
+ (0x1D701, "M", "ζ"),
+ (0x1D702, "M", "η"),
+ (0x1D703, "M", "θ"),
+ (0x1D704, "M", "ι"),
+ (0x1D705, "M", "κ"),
+ (0x1D706, "M", "λ"),
+ (0x1D707, "M", "μ"),
+ (0x1D708, "M", "ν"),
+ (0x1D709, "M", "ξ"),
+ (0x1D70A, "M", "ο"),
+ (0x1D70B, "M", "π"),
+ (0x1D70C, "M", "ρ"),
+ (0x1D70D, "M", "σ"),
+ (0x1D70F, "M", "τ"),
+ (0x1D710, "M", "υ"),
+ (0x1D711, "M", "φ"),
+ (0x1D712, "M", "χ"),
+ (0x1D713, "M", "ψ"),
+ (0x1D714, "M", "ω"),
+ (0x1D715, "M", "∂"),
+ (0x1D716, "M", "ε"),
+ (0x1D717, "M", "θ"),
+ (0x1D718, "M", "κ"),
+ (0x1D719, "M", "φ"),
+ (0x1D71A, "M", "ρ"),
+ (0x1D71B, "M", "π"),
+ (0x1D71C, "M", "α"),
+ (0x1D71D, "M", "β"),
+ (0x1D71E, "M", "γ"),
+ (0x1D71F, "M", "δ"),
+ (0x1D720, "M", "ε"),
+ ]
+
+
+def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D721, "M", "ζ"),
+ (0x1D722, "M", "η"),
+ (0x1D723, "M", "θ"),
+ (0x1D724, "M", "ι"),
+ (0x1D725, "M", "κ"),
+ (0x1D726, "M", "λ"),
+ (0x1D727, "M", "μ"),
+ (0x1D728, "M", "ν"),
+ (0x1D729, "M", "ξ"),
+ (0x1D72A, "M", "ο"),
+ (0x1D72B, "M", "π"),
+ (0x1D72C, "M", "ρ"),
+ (0x1D72D, "M", "θ"),
+ (0x1D72E, "M", "σ"),
+ (0x1D72F, "M", "τ"),
+ (0x1D730, "M", "υ"),
+ (0x1D731, "M", "φ"),
+ (0x1D732, "M", "χ"),
+ (0x1D733, "M", "ψ"),
+ (0x1D734, "M", "ω"),
+ (0x1D735, "M", "∇"),
+ (0x1D736, "M", "α"),
+ (0x1D737, "M", "β"),
+ (0x1D738, "M", "γ"),
+ (0x1D739, "M", "δ"),
+ (0x1D73A, "M", "ε"),
+ (0x1D73B, "M", "ζ"),
+ (0x1D73C, "M", "η"),
+ (0x1D73D, "M", "θ"),
+ (0x1D73E, "M", "ι"),
+ (0x1D73F, "M", "κ"),
+ (0x1D740, "M", "λ"),
+ (0x1D741, "M", "μ"),
+ (0x1D742, "M", "ν"),
+ (0x1D743, "M", "ξ"),
+ (0x1D744, "M", "ο"),
+ (0x1D745, "M", "π"),
+ (0x1D746, "M", "ρ"),
+ (0x1D747, "M", "σ"),
+ (0x1D749, "M", "τ"),
+ (0x1D74A, "M", "υ"),
+ (0x1D74B, "M", "φ"),
+ (0x1D74C, "M", "χ"),
+ (0x1D74D, "M", "ψ"),
+ (0x1D74E, "M", "ω"),
+ (0x1D74F, "M", "∂"),
+ (0x1D750, "M", "ε"),
+ (0x1D751, "M", "θ"),
+ (0x1D752, "M", "κ"),
+ (0x1D753, "M", "φ"),
+ (0x1D754, "M", "ρ"),
+ (0x1D755, "M", "π"),
+ (0x1D756, "M", "α"),
+ (0x1D757, "M", "β"),
+ (0x1D758, "M", "γ"),
+ (0x1D759, "M", "δ"),
+ (0x1D75A, "M", "ε"),
+ (0x1D75B, "M", "ζ"),
+ (0x1D75C, "M", "η"),
+ (0x1D75D, "M", "θ"),
+ (0x1D75E, "M", "ι"),
+ (0x1D75F, "M", "κ"),
+ (0x1D760, "M", "λ"),
+ (0x1D761, "M", "μ"),
+ (0x1D762, "M", "ν"),
+ (0x1D763, "M", "ξ"),
+ (0x1D764, "M", "ο"),
+ (0x1D765, "M", "π"),
+ (0x1D766, "M", "ρ"),
+ (0x1D767, "M", "θ"),
+ (0x1D768, "M", "σ"),
+ (0x1D769, "M", "τ"),
+ (0x1D76A, "M", "υ"),
+ (0x1D76B, "M", "φ"),
+ (0x1D76C, "M", "χ"),
+ (0x1D76D, "M", "ψ"),
+ (0x1D76E, "M", "ω"),
+ (0x1D76F, "M", "∇"),
+ (0x1D770, "M", "α"),
+ (0x1D771, "M", "β"),
+ (0x1D772, "M", "γ"),
+ (0x1D773, "M", "δ"),
+ (0x1D774, "M", "ε"),
+ (0x1D775, "M", "ζ"),
+ (0x1D776, "M", "η"),
+ (0x1D777, "M", "θ"),
+ (0x1D778, "M", "ι"),
+ (0x1D779, "M", "κ"),
+ (0x1D77A, "M", "λ"),
+ (0x1D77B, "M", "μ"),
+ (0x1D77C, "M", "ν"),
+ (0x1D77D, "M", "ξ"),
+ (0x1D77E, "M", "ο"),
+ (0x1D77F, "M", "π"),
+ (0x1D780, "M", "ρ"),
+ (0x1D781, "M", "σ"),
+ (0x1D783, "M", "τ"),
+ (0x1D784, "M", "υ"),
+ (0x1D785, "M", "φ"),
+ (0x1D786, "M", "χ"),
+ ]
+
+
+def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D787, "M", "ψ"),
+ (0x1D788, "M", "ω"),
+ (0x1D789, "M", "∂"),
+ (0x1D78A, "M", "ε"),
+ (0x1D78B, "M", "θ"),
+ (0x1D78C, "M", "κ"),
+ (0x1D78D, "M", "φ"),
+ (0x1D78E, "M", "ρ"),
+ (0x1D78F, "M", "π"),
+ (0x1D790, "M", "α"),
+ (0x1D791, "M", "β"),
+ (0x1D792, "M", "γ"),
+ (0x1D793, "M", "δ"),
+ (0x1D794, "M", "ε"),
+ (0x1D795, "M", "ζ"),
+ (0x1D796, "M", "η"),
+ (0x1D797, "M", "θ"),
+ (0x1D798, "M", "ι"),
+ (0x1D799, "M", "κ"),
+ (0x1D79A, "M", "λ"),
+ (0x1D79B, "M", "μ"),
+ (0x1D79C, "M", "ν"),
+ (0x1D79D, "M", "ξ"),
+ (0x1D79E, "M", "ο"),
+ (0x1D79F, "M", "π"),
+ (0x1D7A0, "M", "ρ"),
+ (0x1D7A1, "M", "θ"),
+ (0x1D7A2, "M", "σ"),
+ (0x1D7A3, "M", "τ"),
+ (0x1D7A4, "M", "υ"),
+ (0x1D7A5, "M", "φ"),
+ (0x1D7A6, "M", "χ"),
+ (0x1D7A7, "M", "ψ"),
+ (0x1D7A8, "M", "ω"),
+ (0x1D7A9, "M", "∇"),
+ (0x1D7AA, "M", "α"),
+ (0x1D7AB, "M", "β"),
+ (0x1D7AC, "M", "γ"),
+ (0x1D7AD, "M", "δ"),
+ (0x1D7AE, "M", "ε"),
+ (0x1D7AF, "M", "ζ"),
+ (0x1D7B0, "M", "η"),
+ (0x1D7B1, "M", "θ"),
+ (0x1D7B2, "M", "ι"),
+ (0x1D7B3, "M", "κ"),
+ (0x1D7B4, "M", "λ"),
+ (0x1D7B5, "M", "μ"),
+ (0x1D7B6, "M", "ν"),
+ (0x1D7B7, "M", "ξ"),
+ (0x1D7B8, "M", "ο"),
+ (0x1D7B9, "M", "π"),
+ (0x1D7BA, "M", "ρ"),
+ (0x1D7BB, "M", "σ"),
+ (0x1D7BD, "M", "τ"),
+ (0x1D7BE, "M", "υ"),
+ (0x1D7BF, "M", "φ"),
+ (0x1D7C0, "M", "χ"),
+ (0x1D7C1, "M", "ψ"),
+ (0x1D7C2, "M", "ω"),
+ (0x1D7C3, "M", "∂"),
+ (0x1D7C4, "M", "ε"),
+ (0x1D7C5, "M", "θ"),
+ (0x1D7C6, "M", "κ"),
+ (0x1D7C7, "M", "φ"),
+ (0x1D7C8, "M", "ρ"),
+ (0x1D7C9, "M", "π"),
+ (0x1D7CA, "M", "ϝ"),
+ (0x1D7CC, "X"),
+ (0x1D7CE, "M", "0"),
+ (0x1D7CF, "M", "1"),
+ (0x1D7D0, "M", "2"),
+ (0x1D7D1, "M", "3"),
+ (0x1D7D2, "M", "4"),
+ (0x1D7D3, "M", "5"),
+ (0x1D7D4, "M", "6"),
+ (0x1D7D5, "M", "7"),
+ (0x1D7D6, "M", "8"),
+ (0x1D7D7, "M", "9"),
+ (0x1D7D8, "M", "0"),
+ (0x1D7D9, "M", "1"),
+ (0x1D7DA, "M", "2"),
+ (0x1D7DB, "M", "3"),
+ (0x1D7DC, "M", "4"),
+ (0x1D7DD, "M", "5"),
+ (0x1D7DE, "M", "6"),
+ (0x1D7DF, "M", "7"),
+ (0x1D7E0, "M", "8"),
+ (0x1D7E1, "M", "9"),
+ (0x1D7E2, "M", "0"),
+ (0x1D7E3, "M", "1"),
+ (0x1D7E4, "M", "2"),
+ (0x1D7E5, "M", "3"),
+ (0x1D7E6, "M", "4"),
+ (0x1D7E7, "M", "5"),
+ (0x1D7E8, "M", "6"),
+ (0x1D7E9, "M", "7"),
+ (0x1D7EA, "M", "8"),
+ (0x1D7EB, "M", "9"),
+ (0x1D7EC, "M", "0"),
+ (0x1D7ED, "M", "1"),
+ ]
+
+
+def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1D7EE, "M", "2"),
+ (0x1D7EF, "M", "3"),
+ (0x1D7F0, "M", "4"),
+ (0x1D7F1, "M", "5"),
+ (0x1D7F2, "M", "6"),
+ (0x1D7F3, "M", "7"),
+ (0x1D7F4, "M", "8"),
+ (0x1D7F5, "M", "9"),
+ (0x1D7F6, "M", "0"),
+ (0x1D7F7, "M", "1"),
+ (0x1D7F8, "M", "2"),
+ (0x1D7F9, "M", "3"),
+ (0x1D7FA, "M", "4"),
+ (0x1D7FB, "M", "5"),
+ (0x1D7FC, "M", "6"),
+ (0x1D7FD, "M", "7"),
+ (0x1D7FE, "M", "8"),
+ (0x1D7FF, "M", "9"),
+ (0x1D800, "V"),
+ (0x1DA8C, "X"),
+ (0x1DA9B, "V"),
+ (0x1DAA0, "X"),
+ (0x1DAA1, "V"),
+ (0x1DAB0, "X"),
+ (0x1DF00, "V"),
+ (0x1DF1F, "X"),
+ (0x1DF25, "V"),
+ (0x1DF2B, "X"),
+ (0x1E000, "V"),
+ (0x1E007, "X"),
+ (0x1E008, "V"),
+ (0x1E019, "X"),
+ (0x1E01B, "V"),
+ (0x1E022, "X"),
+ (0x1E023, "V"),
+ (0x1E025, "X"),
+ (0x1E026, "V"),
+ (0x1E02B, "X"),
+ (0x1E030, "M", "а"),
+ (0x1E031, "M", "б"),
+ (0x1E032, "M", "в"),
+ (0x1E033, "M", "г"),
+ (0x1E034, "M", "д"),
+ (0x1E035, "M", "е"),
+ (0x1E036, "M", "ж"),
+ (0x1E037, "M", "з"),
+ (0x1E038, "M", "и"),
+ (0x1E039, "M", "к"),
+ (0x1E03A, "M", "л"),
+ (0x1E03B, "M", "м"),
+ (0x1E03C, "M", "о"),
+ (0x1E03D, "M", "п"),
+ (0x1E03E, "M", "р"),
+ (0x1E03F, "M", "с"),
+ (0x1E040, "M", "т"),
+ (0x1E041, "M", "у"),
+ (0x1E042, "M", "ф"),
+ (0x1E043, "M", "х"),
+ (0x1E044, "M", "ц"),
+ (0x1E045, "M", "ч"),
+ (0x1E046, "M", "ш"),
+ (0x1E047, "M", "ы"),
+ (0x1E048, "M", "э"),
+ (0x1E049, "M", "ю"),
+ (0x1E04A, "M", "ꚉ"),
+ (0x1E04B, "M", "ә"),
+ (0x1E04C, "M", "і"),
+ (0x1E04D, "M", "ј"),
+ (0x1E04E, "M", "ө"),
+ (0x1E04F, "M", "ү"),
+ (0x1E050, "M", "ӏ"),
+ (0x1E051, "M", "а"),
+ (0x1E052, "M", "б"),
+ (0x1E053, "M", "в"),
+ (0x1E054, "M", "г"),
+ (0x1E055, "M", "д"),
+ (0x1E056, "M", "е"),
+ (0x1E057, "M", "ж"),
+ (0x1E058, "M", "з"),
+ (0x1E059, "M", "и"),
+ (0x1E05A, "M", "к"),
+ (0x1E05B, "M", "л"),
+ (0x1E05C, "M", "о"),
+ (0x1E05D, "M", "п"),
+ (0x1E05E, "M", "с"),
+ (0x1E05F, "M", "у"),
+ (0x1E060, "M", "ф"),
+ (0x1E061, "M", "х"),
+ (0x1E062, "M", "ц"),
+ (0x1E063, "M", "ч"),
+ (0x1E064, "M", "ш"),
+ (0x1E065, "M", "ъ"),
+ (0x1E066, "M", "ы"),
+ (0x1E067, "M", "ґ"),
+ (0x1E068, "M", "і"),
+ (0x1E069, "M", "ѕ"),
+ (0x1E06A, "M", "џ"),
+ (0x1E06B, "M", "ҫ"),
+ (0x1E06C, "M", "ꙑ"),
+ (0x1E06D, "M", "ұ"),
+ ]
+
+
+def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1E06E, "X"),
+ (0x1E08F, "V"),
+ (0x1E090, "X"),
+ (0x1E100, "V"),
+ (0x1E12D, "X"),
+ (0x1E130, "V"),
+ (0x1E13E, "X"),
+ (0x1E140, "V"),
+ (0x1E14A, "X"),
+ (0x1E14E, "V"),
+ (0x1E150, "X"),
+ (0x1E290, "V"),
+ (0x1E2AF, "X"),
+ (0x1E2C0, "V"),
+ (0x1E2FA, "X"),
+ (0x1E2FF, "V"),
+ (0x1E300, "X"),
+ (0x1E4D0, "V"),
+ (0x1E4FA, "X"),
+ (0x1E5D0, "V"),
+ (0x1E5FB, "X"),
+ (0x1E5FF, "V"),
+ (0x1E600, "X"),
+ (0x1E7E0, "V"),
+ (0x1E7E7, "X"),
+ (0x1E7E8, "V"),
+ (0x1E7EC, "X"),
+ (0x1E7ED, "V"),
+ (0x1E7EF, "X"),
+ (0x1E7F0, "V"),
+ (0x1E7FF, "X"),
+ (0x1E800, "V"),
+ (0x1E8C5, "X"),
+ (0x1E8C7, "V"),
+ (0x1E8D7, "X"),
+ (0x1E900, "M", "𞤢"),
+ (0x1E901, "M", "𞤣"),
+ (0x1E902, "M", "𞤤"),
+ (0x1E903, "M", "𞤥"),
+ (0x1E904, "M", "𞤦"),
+ (0x1E905, "M", "𞤧"),
+ (0x1E906, "M", "𞤨"),
+ (0x1E907, "M", "𞤩"),
+ (0x1E908, "M", "𞤪"),
+ (0x1E909, "M", "𞤫"),
+ (0x1E90A, "M", "𞤬"),
+ (0x1E90B, "M", "𞤭"),
+ (0x1E90C, "M", "𞤮"),
+ (0x1E90D, "M", "𞤯"),
+ (0x1E90E, "M", "𞤰"),
+ (0x1E90F, "M", "𞤱"),
+ (0x1E910, "M", "𞤲"),
+ (0x1E911, "M", "𞤳"),
+ (0x1E912, "M", "𞤴"),
+ (0x1E913, "M", "𞤵"),
+ (0x1E914, "M", "𞤶"),
+ (0x1E915, "M", "𞤷"),
+ (0x1E916, "M", "𞤸"),
+ (0x1E917, "M", "𞤹"),
+ (0x1E918, "M", "𞤺"),
+ (0x1E919, "M", "𞤻"),
+ (0x1E91A, "M", "𞤼"),
+ (0x1E91B, "M", "𞤽"),
+ (0x1E91C, "M", "𞤾"),
+ (0x1E91D, "M", "𞤿"),
+ (0x1E91E, "M", "𞥀"),
+ (0x1E91F, "M", "𞥁"),
+ (0x1E920, "M", "𞥂"),
+ (0x1E921, "M", "𞥃"),
+ (0x1E922, "V"),
+ (0x1E94C, "X"),
+ (0x1E950, "V"),
+ (0x1E95A, "X"),
+ (0x1E95E, "V"),
+ (0x1E960, "X"),
+ (0x1EC71, "V"),
+ (0x1ECB5, "X"),
+ (0x1ED01, "V"),
+ (0x1ED3E, "X"),
+ (0x1EE00, "M", "ا"),
+ (0x1EE01, "M", "ب"),
+ (0x1EE02, "M", "ج"),
+ (0x1EE03, "M", "د"),
+ (0x1EE04, "X"),
+ (0x1EE05, "M", "و"),
+ (0x1EE06, "M", "ز"),
+ (0x1EE07, "M", "ح"),
+ (0x1EE08, "M", "ط"),
+ (0x1EE09, "M", "ي"),
+ (0x1EE0A, "M", "ك"),
+ (0x1EE0B, "M", "ل"),
+ (0x1EE0C, "M", "م"),
+ (0x1EE0D, "M", "ن"),
+ (0x1EE0E, "M", "س"),
+ (0x1EE0F, "M", "ع"),
+ (0x1EE10, "M", "ف"),
+ (0x1EE11, "M", "ص"),
+ (0x1EE12, "M", "ق"),
+ (0x1EE13, "M", "ر"),
+ (0x1EE14, "M", "ش"),
+ ]
+
+
+def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EE15, "M", "ت"),
+ (0x1EE16, "M", "ث"),
+ (0x1EE17, "M", "خ"),
+ (0x1EE18, "M", "ذ"),
+ (0x1EE19, "M", "ض"),
+ (0x1EE1A, "M", "ظ"),
+ (0x1EE1B, "M", "غ"),
+ (0x1EE1C, "M", "ٮ"),
+ (0x1EE1D, "M", "ں"),
+ (0x1EE1E, "M", "ڡ"),
+ (0x1EE1F, "M", "ٯ"),
+ (0x1EE20, "X"),
+ (0x1EE21, "M", "ب"),
+ (0x1EE22, "M", "ج"),
+ (0x1EE23, "X"),
+ (0x1EE24, "M", "ه"),
+ (0x1EE25, "X"),
+ (0x1EE27, "M", "ح"),
+ (0x1EE28, "X"),
+ (0x1EE29, "M", "ي"),
+ (0x1EE2A, "M", "ك"),
+ (0x1EE2B, "M", "ل"),
+ (0x1EE2C, "M", "م"),
+ (0x1EE2D, "M", "ن"),
+ (0x1EE2E, "M", "س"),
+ (0x1EE2F, "M", "ع"),
+ (0x1EE30, "M", "ف"),
+ (0x1EE31, "M", "ص"),
+ (0x1EE32, "M", "ق"),
+ (0x1EE33, "X"),
+ (0x1EE34, "M", "ش"),
+ (0x1EE35, "M", "ت"),
+ (0x1EE36, "M", "ث"),
+ (0x1EE37, "M", "خ"),
+ (0x1EE38, "X"),
+ (0x1EE39, "M", "ض"),
+ (0x1EE3A, "X"),
+ (0x1EE3B, "M", "غ"),
+ (0x1EE3C, "X"),
+ (0x1EE42, "M", "ج"),
+ (0x1EE43, "X"),
+ (0x1EE47, "M", "ح"),
+ (0x1EE48, "X"),
+ (0x1EE49, "M", "ي"),
+ (0x1EE4A, "X"),
+ (0x1EE4B, "M", "ل"),
+ (0x1EE4C, "X"),
+ (0x1EE4D, "M", "ن"),
+ (0x1EE4E, "M", "س"),
+ (0x1EE4F, "M", "ع"),
+ (0x1EE50, "X"),
+ (0x1EE51, "M", "ص"),
+ (0x1EE52, "M", "ق"),
+ (0x1EE53, "X"),
+ (0x1EE54, "M", "ش"),
+ (0x1EE55, "X"),
+ (0x1EE57, "M", "خ"),
+ (0x1EE58, "X"),
+ (0x1EE59, "M", "ض"),
+ (0x1EE5A, "X"),
+ (0x1EE5B, "M", "غ"),
+ (0x1EE5C, "X"),
+ (0x1EE5D, "M", "ں"),
+ (0x1EE5E, "X"),
+ (0x1EE5F, "M", "ٯ"),
+ (0x1EE60, "X"),
+ (0x1EE61, "M", "ب"),
+ (0x1EE62, "M", "ج"),
+ (0x1EE63, "X"),
+ (0x1EE64, "M", "ه"),
+ (0x1EE65, "X"),
+ (0x1EE67, "M", "ح"),
+ (0x1EE68, "M", "ط"),
+ (0x1EE69, "M", "ي"),
+ (0x1EE6A, "M", "ك"),
+ (0x1EE6B, "X"),
+ (0x1EE6C, "M", "م"),
+ (0x1EE6D, "M", "ن"),
+ (0x1EE6E, "M", "س"),
+ (0x1EE6F, "M", "ع"),
+ (0x1EE70, "M", "ف"),
+ (0x1EE71, "M", "ص"),
+ (0x1EE72, "M", "ق"),
+ (0x1EE73, "X"),
+ (0x1EE74, "M", "ش"),
+ (0x1EE75, "M", "ت"),
+ (0x1EE76, "M", "ث"),
+ (0x1EE77, "M", "خ"),
+ (0x1EE78, "X"),
+ (0x1EE79, "M", "ض"),
+ (0x1EE7A, "M", "ظ"),
+ (0x1EE7B, "M", "غ"),
+ (0x1EE7C, "M", "ٮ"),
+ (0x1EE7D, "X"),
+ (0x1EE7E, "M", "ڡ"),
+ (0x1EE7F, "X"),
+ (0x1EE80, "M", "ا"),
+ (0x1EE81, "M", "ب"),
+ (0x1EE82, "M", "ج"),
+ (0x1EE83, "M", "د"),
+ ]
+
+
+def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1EE84, "M", "ه"),
+ (0x1EE85, "M", "و"),
+ (0x1EE86, "M", "ز"),
+ (0x1EE87, "M", "ح"),
+ (0x1EE88, "M", "ط"),
+ (0x1EE89, "M", "ي"),
+ (0x1EE8A, "X"),
+ (0x1EE8B, "M", "ل"),
+ (0x1EE8C, "M", "م"),
+ (0x1EE8D, "M", "ن"),
+ (0x1EE8E, "M", "س"),
+ (0x1EE8F, "M", "ع"),
+ (0x1EE90, "M", "ف"),
+ (0x1EE91, "M", "ص"),
+ (0x1EE92, "M", "ق"),
+ (0x1EE93, "M", "ر"),
+ (0x1EE94, "M", "ش"),
+ (0x1EE95, "M", "ت"),
+ (0x1EE96, "M", "ث"),
+ (0x1EE97, "M", "خ"),
+ (0x1EE98, "M", "ذ"),
+ (0x1EE99, "M", "ض"),
+ (0x1EE9A, "M", "ظ"),
+ (0x1EE9B, "M", "غ"),
+ (0x1EE9C, "X"),
+ (0x1EEA1, "M", "ب"),
+ (0x1EEA2, "M", "ج"),
+ (0x1EEA3, "M", "د"),
+ (0x1EEA4, "X"),
+ (0x1EEA5, "M", "و"),
+ (0x1EEA6, "M", "ز"),
+ (0x1EEA7, "M", "ح"),
+ (0x1EEA8, "M", "ط"),
+ (0x1EEA9, "M", "ي"),
+ (0x1EEAA, "X"),
+ (0x1EEAB, "M", "ل"),
+ (0x1EEAC, "M", "م"),
+ (0x1EEAD, "M", "ن"),
+ (0x1EEAE, "M", "س"),
+ (0x1EEAF, "M", "ع"),
+ (0x1EEB0, "M", "ف"),
+ (0x1EEB1, "M", "ص"),
+ (0x1EEB2, "M", "ق"),
+ (0x1EEB3, "M", "ر"),
+ (0x1EEB4, "M", "ش"),
+ (0x1EEB5, "M", "ت"),
+ (0x1EEB6, "M", "ث"),
+ (0x1EEB7, "M", "خ"),
+ (0x1EEB8, "M", "ذ"),
+ (0x1EEB9, "M", "ض"),
+ (0x1EEBA, "M", "ظ"),
+ (0x1EEBB, "M", "غ"),
+ (0x1EEBC, "X"),
+ (0x1EEF0, "V"),
+ (0x1EEF2, "X"),
+ (0x1F000, "V"),
+ (0x1F02C, "X"),
+ (0x1F030, "V"),
+ (0x1F094, "X"),
+ (0x1F0A0, "V"),
+ (0x1F0AF, "X"),
+ (0x1F0B1, "V"),
+ (0x1F0C0, "X"),
+ (0x1F0C1, "V"),
+ (0x1F0D0, "X"),
+ (0x1F0D1, "V"),
+ (0x1F0F6, "X"),
+ (0x1F101, "M", "0,"),
+ (0x1F102, "M", "1,"),
+ (0x1F103, "M", "2,"),
+ (0x1F104, "M", "3,"),
+ (0x1F105, "M", "4,"),
+ (0x1F106, "M", "5,"),
+ (0x1F107, "M", "6,"),
+ (0x1F108, "M", "7,"),
+ (0x1F109, "M", "8,"),
+ (0x1F10A, "M", "9,"),
+ (0x1F10B, "V"),
+ (0x1F110, "M", "(a)"),
+ (0x1F111, "M", "(b)"),
+ (0x1F112, "M", "(c)"),
+ (0x1F113, "M", "(d)"),
+ (0x1F114, "M", "(e)"),
+ (0x1F115, "M", "(f)"),
+ (0x1F116, "M", "(g)"),
+ (0x1F117, "M", "(h)"),
+ (0x1F118, "M", "(i)"),
+ (0x1F119, "M", "(j)"),
+ (0x1F11A, "M", "(k)"),
+ (0x1F11B, "M", "(l)"),
+ (0x1F11C, "M", "(m)"),
+ (0x1F11D, "M", "(n)"),
+ (0x1F11E, "M", "(o)"),
+ (0x1F11F, "M", "(p)"),
+ (0x1F120, "M", "(q)"),
+ (0x1F121, "M", "(r)"),
+ (0x1F122, "M", "(s)"),
+ (0x1F123, "M", "(t)"),
+ (0x1F124, "M", "(u)"),
+ (0x1F125, "M", "(v)"),
+ ]
+
+
+def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1F126, "M", "(w)"),
+ (0x1F127, "M", "(x)"),
+ (0x1F128, "M", "(y)"),
+ (0x1F129, "M", "(z)"),
+ (0x1F12A, "M", "〔s〕"),
+ (0x1F12B, "M", "c"),
+ (0x1F12C, "M", "r"),
+ (0x1F12D, "M", "cd"),
+ (0x1F12E, "M", "wz"),
+ (0x1F12F, "V"),
+ (0x1F130, "M", "a"),
+ (0x1F131, "M", "b"),
+ (0x1F132, "M", "c"),
+ (0x1F133, "M", "d"),
+ (0x1F134, "M", "e"),
+ (0x1F135, "M", "f"),
+ (0x1F136, "M", "g"),
+ (0x1F137, "M", "h"),
+ (0x1F138, "M", "i"),
+ (0x1F139, "M", "j"),
+ (0x1F13A, "M", "k"),
+ (0x1F13B, "M", "l"),
+ (0x1F13C, "M", "m"),
+ (0x1F13D, "M", "n"),
+ (0x1F13E, "M", "o"),
+ (0x1F13F, "M", "p"),
+ (0x1F140, "M", "q"),
+ (0x1F141, "M", "r"),
+ (0x1F142, "M", "s"),
+ (0x1F143, "M", "t"),
+ (0x1F144, "M", "u"),
+ (0x1F145, "M", "v"),
+ (0x1F146, "M", "w"),
+ (0x1F147, "M", "x"),
+ (0x1F148, "M", "y"),
+ (0x1F149, "M", "z"),
+ (0x1F14A, "M", "hv"),
+ (0x1F14B, "M", "mv"),
+ (0x1F14C, "M", "sd"),
+ (0x1F14D, "M", "ss"),
+ (0x1F14E, "M", "ppv"),
+ (0x1F14F, "M", "wc"),
+ (0x1F150, "V"),
+ (0x1F16A, "M", "mc"),
+ (0x1F16B, "M", "md"),
+ (0x1F16C, "M", "mr"),
+ (0x1F16D, "V"),
+ (0x1F190, "M", "dj"),
+ (0x1F191, "V"),
+ (0x1F1AE, "X"),
+ (0x1F1E6, "V"),
+ (0x1F200, "M", "ほか"),
+ (0x1F201, "M", "ココ"),
+ (0x1F202, "M", "サ"),
+ (0x1F203, "X"),
+ (0x1F210, "M", "手"),
+ (0x1F211, "M", "字"),
+ (0x1F212, "M", "双"),
+ (0x1F213, "M", "デ"),
+ (0x1F214, "M", "二"),
+ (0x1F215, "M", "多"),
+ (0x1F216, "M", "解"),
+ (0x1F217, "M", "天"),
+ (0x1F218, "M", "交"),
+ (0x1F219, "M", "映"),
+ (0x1F21A, "M", "無"),
+ (0x1F21B, "M", "料"),
+ (0x1F21C, "M", "前"),
+ (0x1F21D, "M", "後"),
+ (0x1F21E, "M", "再"),
+ (0x1F21F, "M", "新"),
+ (0x1F220, "M", "初"),
+ (0x1F221, "M", "終"),
+ (0x1F222, "M", "生"),
+ (0x1F223, "M", "販"),
+ (0x1F224, "M", "声"),
+ (0x1F225, "M", "吹"),
+ (0x1F226, "M", "演"),
+ (0x1F227, "M", "投"),
+ (0x1F228, "M", "捕"),
+ (0x1F229, "M", "一"),
+ (0x1F22A, "M", "三"),
+ (0x1F22B, "M", "遊"),
+ (0x1F22C, "M", "左"),
+ (0x1F22D, "M", "中"),
+ (0x1F22E, "M", "右"),
+ (0x1F22F, "M", "指"),
+ (0x1F230, "M", "走"),
+ (0x1F231, "M", "打"),
+ (0x1F232, "M", "禁"),
+ (0x1F233, "M", "空"),
+ (0x1F234, "M", "合"),
+ (0x1F235, "M", "満"),
+ (0x1F236, "M", "有"),
+ (0x1F237, "M", "月"),
+ (0x1F238, "M", "申"),
+ (0x1F239, "M", "割"),
+ (0x1F23A, "M", "営"),
+ (0x1F23B, "M", "配"),
+ (0x1F23C, "X"),
+ ]
+
+
+def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1F240, "M", "〔本〕"),
+ (0x1F241, "M", "〔三〕"),
+ (0x1F242, "M", "〔二〕"),
+ (0x1F243, "M", "〔安〕"),
+ (0x1F244, "M", "〔点〕"),
+ (0x1F245, "M", "〔打〕"),
+ (0x1F246, "M", "〔盗〕"),
+ (0x1F247, "M", "〔勝〕"),
+ (0x1F248, "M", "〔敗〕"),
+ (0x1F249, "X"),
+ (0x1F250, "M", "得"),
+ (0x1F251, "M", "可"),
+ (0x1F252, "X"),
+ (0x1F260, "V"),
+ (0x1F266, "X"),
+ (0x1F300, "V"),
+ (0x1F6D8, "X"),
+ (0x1F6DC, "V"),
+ (0x1F6ED, "X"),
+ (0x1F6F0, "V"),
+ (0x1F6FD, "X"),
+ (0x1F700, "V"),
+ (0x1F777, "X"),
+ (0x1F77B, "V"),
+ (0x1F7DA, "X"),
+ (0x1F7E0, "V"),
+ (0x1F7EC, "X"),
+ (0x1F7F0, "V"),
+ (0x1F7F1, "X"),
+ (0x1F800, "V"),
+ (0x1F80C, "X"),
+ (0x1F810, "V"),
+ (0x1F848, "X"),
+ (0x1F850, "V"),
+ (0x1F85A, "X"),
+ (0x1F860, "V"),
+ (0x1F888, "X"),
+ (0x1F890, "V"),
+ (0x1F8AE, "X"),
+ (0x1F8B0, "V"),
+ (0x1F8BC, "X"),
+ (0x1F8C0, "V"),
+ (0x1F8C2, "X"),
+ (0x1F900, "V"),
+ (0x1FA54, "X"),
+ (0x1FA60, "V"),
+ (0x1FA6E, "X"),
+ (0x1FA70, "V"),
+ (0x1FA7D, "X"),
+ (0x1FA80, "V"),
+ (0x1FA8A, "X"),
+ (0x1FA8F, "V"),
+ (0x1FAC7, "X"),
+ (0x1FACE, "V"),
+ (0x1FADD, "X"),
+ (0x1FADF, "V"),
+ (0x1FAEA, "X"),
+ (0x1FAF0, "V"),
+ (0x1FAF9, "X"),
+ (0x1FB00, "V"),
+ (0x1FB93, "X"),
+ (0x1FB94, "V"),
+ (0x1FBF0, "M", "0"),
+ (0x1FBF1, "M", "1"),
+ (0x1FBF2, "M", "2"),
+ (0x1FBF3, "M", "3"),
+ (0x1FBF4, "M", "4"),
+ (0x1FBF5, "M", "5"),
+ (0x1FBF6, "M", "6"),
+ (0x1FBF7, "M", "7"),
+ (0x1FBF8, "M", "8"),
+ (0x1FBF9, "M", "9"),
+ (0x1FBFA, "X"),
+ (0x20000, "V"),
+ (0x2A6E0, "X"),
+ (0x2A700, "V"),
+ (0x2B73A, "X"),
+ (0x2B740, "V"),
+ (0x2B81E, "X"),
+ (0x2B820, "V"),
+ (0x2CEA2, "X"),
+ (0x2CEB0, "V"),
+ (0x2EBE1, "X"),
+ (0x2EBF0, "V"),
+ (0x2EE5E, "X"),
+ (0x2F800, "M", "丽"),
+ (0x2F801, "M", "丸"),
+ (0x2F802, "M", "乁"),
+ (0x2F803, "M", "𠄢"),
+ (0x2F804, "M", "你"),
+ (0x2F805, "M", "侮"),
+ (0x2F806, "M", "侻"),
+ (0x2F807, "M", "倂"),
+ (0x2F808, "M", "偺"),
+ (0x2F809, "M", "備"),
+ (0x2F80A, "M", "僧"),
+ (0x2F80B, "M", "像"),
+ (0x2F80C, "M", "㒞"),
+ (0x2F80D, "M", "𠘺"),
+ (0x2F80E, "M", "免"),
+ ]
+
+
+def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F80F, "M", "兔"),
+ (0x2F810, "M", "兤"),
+ (0x2F811, "M", "具"),
+ (0x2F812, "M", "𠔜"),
+ (0x2F813, "M", "㒹"),
+ (0x2F814, "M", "內"),
+ (0x2F815, "M", "再"),
+ (0x2F816, "M", "𠕋"),
+ (0x2F817, "M", "冗"),
+ (0x2F818, "M", "冤"),
+ (0x2F819, "M", "仌"),
+ (0x2F81A, "M", "冬"),
+ (0x2F81B, "M", "况"),
+ (0x2F81C, "M", "𩇟"),
+ (0x2F81D, "M", "凵"),
+ (0x2F81E, "M", "刃"),
+ (0x2F81F, "M", "㓟"),
+ (0x2F820, "M", "刻"),
+ (0x2F821, "M", "剆"),
+ (0x2F822, "M", "割"),
+ (0x2F823, "M", "剷"),
+ (0x2F824, "M", "㔕"),
+ (0x2F825, "M", "勇"),
+ (0x2F826, "M", "勉"),
+ (0x2F827, "M", "勤"),
+ (0x2F828, "M", "勺"),
+ (0x2F829, "M", "包"),
+ (0x2F82A, "M", "匆"),
+ (0x2F82B, "M", "北"),
+ (0x2F82C, "M", "卉"),
+ (0x2F82D, "M", "卑"),
+ (0x2F82E, "M", "博"),
+ (0x2F82F, "M", "即"),
+ (0x2F830, "M", "卽"),
+ (0x2F831, "M", "卿"),
+ (0x2F834, "M", "𠨬"),
+ (0x2F835, "M", "灰"),
+ (0x2F836, "M", "及"),
+ (0x2F837, "M", "叟"),
+ (0x2F838, "M", "𠭣"),
+ (0x2F839, "M", "叫"),
+ (0x2F83A, "M", "叱"),
+ (0x2F83B, "M", "吆"),
+ (0x2F83C, "M", "咞"),
+ (0x2F83D, "M", "吸"),
+ (0x2F83E, "M", "呈"),
+ (0x2F83F, "M", "周"),
+ (0x2F840, "M", "咢"),
+ (0x2F841, "M", "哶"),
+ (0x2F842, "M", "唐"),
+ (0x2F843, "M", "啓"),
+ (0x2F844, "M", "啣"),
+ (0x2F845, "M", "善"),
+ (0x2F847, "M", "喙"),
+ (0x2F848, "M", "喫"),
+ (0x2F849, "M", "喳"),
+ (0x2F84A, "M", "嗂"),
+ (0x2F84B, "M", "圖"),
+ (0x2F84C, "M", "嘆"),
+ (0x2F84D, "M", "圗"),
+ (0x2F84E, "M", "噑"),
+ (0x2F84F, "M", "噴"),
+ (0x2F850, "M", "切"),
+ (0x2F851, "M", "壮"),
+ (0x2F852, "M", "城"),
+ (0x2F853, "M", "埴"),
+ (0x2F854, "M", "堍"),
+ (0x2F855, "M", "型"),
+ (0x2F856, "M", "堲"),
+ (0x2F857, "M", "報"),
+ (0x2F858, "M", "墬"),
+ (0x2F859, "M", "𡓤"),
+ (0x2F85A, "M", "売"),
+ (0x2F85B, "M", "壷"),
+ (0x2F85C, "M", "夆"),
+ (0x2F85D, "M", "多"),
+ (0x2F85E, "M", "夢"),
+ (0x2F85F, "M", "奢"),
+ (0x2F860, "M", "𡚨"),
+ (0x2F861, "M", "𡛪"),
+ (0x2F862, "M", "姬"),
+ (0x2F863, "M", "娛"),
+ (0x2F864, "M", "娧"),
+ (0x2F865, "M", "姘"),
+ (0x2F866, "M", "婦"),
+ (0x2F867, "M", "㛮"),
+ (0x2F868, "M", "㛼"),
+ (0x2F869, "M", "嬈"),
+ (0x2F86A, "M", "嬾"),
+ (0x2F86C, "M", "𡧈"),
+ (0x2F86D, "M", "寃"),
+ (0x2F86E, "M", "寘"),
+ (0x2F86F, "M", "寧"),
+ (0x2F870, "M", "寳"),
+ (0x2F871, "M", "𡬘"),
+ (0x2F872, "M", "寿"),
+ (0x2F873, "M", "将"),
+ (0x2F874, "M", "当"),
+ (0x2F875, "M", "尢"),
+ (0x2F876, "M", "㞁"),
+ ]
+
+
+def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F877, "M", "屠"),
+ (0x2F878, "M", "屮"),
+ (0x2F879, "M", "峀"),
+ (0x2F87A, "M", "岍"),
+ (0x2F87B, "M", "𡷤"),
+ (0x2F87C, "M", "嵃"),
+ (0x2F87D, "M", "𡷦"),
+ (0x2F87E, "M", "嵮"),
+ (0x2F87F, "M", "嵫"),
+ (0x2F880, "M", "嵼"),
+ (0x2F881, "M", "巡"),
+ (0x2F882, "M", "巢"),
+ (0x2F883, "M", "㠯"),
+ (0x2F884, "M", "巽"),
+ (0x2F885, "M", "帨"),
+ (0x2F886, "M", "帽"),
+ (0x2F887, "M", "幩"),
+ (0x2F888, "M", "㡢"),
+ (0x2F889, "M", "𢆃"),
+ (0x2F88A, "M", "㡼"),
+ (0x2F88B, "M", "庰"),
+ (0x2F88C, "M", "庳"),
+ (0x2F88D, "M", "庶"),
+ (0x2F88E, "M", "廊"),
+ (0x2F88F, "M", "𪎒"),
+ (0x2F890, "M", "廾"),
+ (0x2F891, "M", "𢌱"),
+ (0x2F893, "M", "舁"),
+ (0x2F894, "M", "弢"),
+ (0x2F896, "M", "㣇"),
+ (0x2F897, "M", "𣊸"),
+ (0x2F898, "M", "𦇚"),
+ (0x2F899, "M", "形"),
+ (0x2F89A, "M", "彫"),
+ (0x2F89B, "M", "㣣"),
+ (0x2F89C, "M", "徚"),
+ (0x2F89D, "M", "忍"),
+ (0x2F89E, "M", "志"),
+ (0x2F89F, "M", "忹"),
+ (0x2F8A0, "M", "悁"),
+ (0x2F8A1, "M", "㤺"),
+ (0x2F8A2, "M", "㤜"),
+ (0x2F8A3, "M", "悔"),
+ (0x2F8A4, "M", "𢛔"),
+ (0x2F8A5, "M", "惇"),
+ (0x2F8A6, "M", "慈"),
+ (0x2F8A7, "M", "慌"),
+ (0x2F8A8, "M", "慎"),
+ (0x2F8A9, "M", "慌"),
+ (0x2F8AA, "M", "慺"),
+ (0x2F8AB, "M", "憎"),
+ (0x2F8AC, "M", "憲"),
+ (0x2F8AD, "M", "憤"),
+ (0x2F8AE, "M", "憯"),
+ (0x2F8AF, "M", "懞"),
+ (0x2F8B0, "M", "懲"),
+ (0x2F8B1, "M", "懶"),
+ (0x2F8B2, "M", "成"),
+ (0x2F8B3, "M", "戛"),
+ (0x2F8B4, "M", "扝"),
+ (0x2F8B5, "M", "抱"),
+ (0x2F8B6, "M", "拔"),
+ (0x2F8B7, "M", "捐"),
+ (0x2F8B8, "M", "𢬌"),
+ (0x2F8B9, "M", "挽"),
+ (0x2F8BA, "M", "拼"),
+ (0x2F8BB, "M", "捨"),
+ (0x2F8BC, "M", "掃"),
+ (0x2F8BD, "M", "揤"),
+ (0x2F8BE, "M", "𢯱"),
+ (0x2F8BF, "M", "搢"),
+ (0x2F8C0, "M", "揅"),
+ (0x2F8C1, "M", "掩"),
+ (0x2F8C2, "M", "㨮"),
+ (0x2F8C3, "M", "摩"),
+ (0x2F8C4, "M", "摾"),
+ (0x2F8C5, "M", "撝"),
+ (0x2F8C6, "M", "摷"),
+ (0x2F8C7, "M", "㩬"),
+ (0x2F8C8, "M", "敏"),
+ (0x2F8C9, "M", "敬"),
+ (0x2F8CA, "M", "𣀊"),
+ (0x2F8CB, "M", "旣"),
+ (0x2F8CC, "M", "書"),
+ (0x2F8CD, "M", "晉"),
+ (0x2F8CE, "M", "㬙"),
+ (0x2F8CF, "M", "暑"),
+ (0x2F8D0, "M", "㬈"),
+ (0x2F8D1, "M", "㫤"),
+ (0x2F8D2, "M", "冒"),
+ (0x2F8D3, "M", "冕"),
+ (0x2F8D4, "M", "最"),
+ (0x2F8D5, "M", "暜"),
+ (0x2F8D6, "M", "肭"),
+ (0x2F8D7, "M", "䏙"),
+ (0x2F8D8, "M", "朗"),
+ (0x2F8D9, "M", "望"),
+ (0x2F8DA, "M", "朡"),
+ (0x2F8DB, "M", "杞"),
+ (0x2F8DC, "M", "杓"),
+ ]
+
+
+def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F8DD, "M", "𣏃"),
+ (0x2F8DE, "M", "㭉"),
+ (0x2F8DF, "M", "柺"),
+ (0x2F8E0, "M", "枅"),
+ (0x2F8E1, "M", "桒"),
+ (0x2F8E2, "M", "梅"),
+ (0x2F8E3, "M", "𣑭"),
+ (0x2F8E4, "M", "梎"),
+ (0x2F8E5, "M", "栟"),
+ (0x2F8E6, "M", "椔"),
+ (0x2F8E7, "M", "㮝"),
+ (0x2F8E8, "M", "楂"),
+ (0x2F8E9, "M", "榣"),
+ (0x2F8EA, "M", "槪"),
+ (0x2F8EB, "M", "檨"),
+ (0x2F8EC, "M", "𣚣"),
+ (0x2F8ED, "M", "櫛"),
+ (0x2F8EE, "M", "㰘"),
+ (0x2F8EF, "M", "次"),
+ (0x2F8F0, "M", "𣢧"),
+ (0x2F8F1, "M", "歔"),
+ (0x2F8F2, "M", "㱎"),
+ (0x2F8F3, "M", "歲"),
+ (0x2F8F4, "M", "殟"),
+ (0x2F8F5, "M", "殺"),
+ (0x2F8F6, "M", "殻"),
+ (0x2F8F7, "M", "𣪍"),
+ (0x2F8F8, "M", "𡴋"),
+ (0x2F8F9, "M", "𣫺"),
+ (0x2F8FA, "M", "汎"),
+ (0x2F8FB, "M", "𣲼"),
+ (0x2F8FC, "M", "沿"),
+ (0x2F8FD, "M", "泍"),
+ (0x2F8FE, "M", "汧"),
+ (0x2F8FF, "M", "洖"),
+ (0x2F900, "M", "派"),
+ (0x2F901, "M", "海"),
+ (0x2F902, "M", "流"),
+ (0x2F903, "M", "浩"),
+ (0x2F904, "M", "浸"),
+ (0x2F905, "M", "涅"),
+ (0x2F906, "M", "𣴞"),
+ (0x2F907, "M", "洴"),
+ (0x2F908, "M", "港"),
+ (0x2F909, "M", "湮"),
+ (0x2F90A, "M", "㴳"),
+ (0x2F90B, "M", "滋"),
+ (0x2F90C, "M", "滇"),
+ (0x2F90D, "M", "𣻑"),
+ (0x2F90E, "M", "淹"),
+ (0x2F90F, "M", "潮"),
+ (0x2F910, "M", "𣽞"),
+ (0x2F911, "M", "𣾎"),
+ (0x2F912, "M", "濆"),
+ (0x2F913, "M", "瀹"),
+ (0x2F914, "M", "瀞"),
+ (0x2F915, "M", "瀛"),
+ (0x2F916, "M", "㶖"),
+ (0x2F917, "M", "灊"),
+ (0x2F918, "M", "災"),
+ (0x2F919, "M", "灷"),
+ (0x2F91A, "M", "炭"),
+ (0x2F91B, "M", "𠔥"),
+ (0x2F91C, "M", "煅"),
+ (0x2F91D, "M", "𤉣"),
+ (0x2F91E, "M", "熜"),
+ (0x2F91F, "M", "𤎫"),
+ (0x2F920, "M", "爨"),
+ (0x2F921, "M", "爵"),
+ (0x2F922, "M", "牐"),
+ (0x2F923, "M", "𤘈"),
+ (0x2F924, "M", "犀"),
+ (0x2F925, "M", "犕"),
+ (0x2F926, "M", "𤜵"),
+ (0x2F927, "M", "𤠔"),
+ (0x2F928, "M", "獺"),
+ (0x2F929, "M", "王"),
+ (0x2F92A, "M", "㺬"),
+ (0x2F92B, "M", "玥"),
+ (0x2F92C, "M", "㺸"),
+ (0x2F92E, "M", "瑇"),
+ (0x2F92F, "M", "瑜"),
+ (0x2F930, "M", "瑱"),
+ (0x2F931, "M", "璅"),
+ (0x2F932, "M", "瓊"),
+ (0x2F933, "M", "㼛"),
+ (0x2F934, "M", "甤"),
+ (0x2F935, "M", "𤰶"),
+ (0x2F936, "M", "甾"),
+ (0x2F937, "M", "𤲒"),
+ (0x2F938, "M", "異"),
+ (0x2F939, "M", "𢆟"),
+ (0x2F93A, "M", "瘐"),
+ (0x2F93B, "M", "𤾡"),
+ (0x2F93C, "M", "𤾸"),
+ (0x2F93D, "M", "𥁄"),
+ (0x2F93E, "M", "㿼"),
+ (0x2F93F, "M", "䀈"),
+ (0x2F940, "M", "直"),
+ (0x2F941, "M", "𥃳"),
+ ]
+
+
+def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F942, "M", "𥃲"),
+ (0x2F943, "M", "𥄙"),
+ (0x2F944, "M", "𥄳"),
+ (0x2F945, "M", "眞"),
+ (0x2F946, "M", "真"),
+ (0x2F948, "M", "睊"),
+ (0x2F949, "M", "䀹"),
+ (0x2F94A, "M", "瞋"),
+ (0x2F94B, "M", "䁆"),
+ (0x2F94C, "M", "䂖"),
+ (0x2F94D, "M", "𥐝"),
+ (0x2F94E, "M", "硎"),
+ (0x2F94F, "M", "碌"),
+ (0x2F950, "M", "磌"),
+ (0x2F951, "M", "䃣"),
+ (0x2F952, "M", "𥘦"),
+ (0x2F953, "M", "祖"),
+ (0x2F954, "M", "𥚚"),
+ (0x2F955, "M", "𥛅"),
+ (0x2F956, "M", "福"),
+ (0x2F957, "M", "秫"),
+ (0x2F958, "M", "䄯"),
+ (0x2F959, "M", "穀"),
+ (0x2F95A, "M", "穊"),
+ (0x2F95B, "M", "穏"),
+ (0x2F95C, "M", "𥥼"),
+ (0x2F95D, "M", "𥪧"),
+ (0x2F95F, "M", "竮"),
+ (0x2F960, "M", "䈂"),
+ (0x2F961, "M", "𥮫"),
+ (0x2F962, "M", "篆"),
+ (0x2F963, "M", "築"),
+ (0x2F964, "M", "䈧"),
+ (0x2F965, "M", "𥲀"),
+ (0x2F966, "M", "糒"),
+ (0x2F967, "M", "䊠"),
+ (0x2F968, "M", "糨"),
+ (0x2F969, "M", "糣"),
+ (0x2F96A, "M", "紀"),
+ (0x2F96B, "M", "𥾆"),
+ (0x2F96C, "M", "絣"),
+ (0x2F96D, "M", "䌁"),
+ (0x2F96E, "M", "緇"),
+ (0x2F96F, "M", "縂"),
+ (0x2F970, "M", "繅"),
+ (0x2F971, "M", "䌴"),
+ (0x2F972, "M", "𦈨"),
+ (0x2F973, "M", "𦉇"),
+ (0x2F974, "M", "䍙"),
+ (0x2F975, "M", "𦋙"),
+ (0x2F976, "M", "罺"),
+ (0x2F977, "M", "𦌾"),
+ (0x2F978, "M", "羕"),
+ (0x2F979, "M", "翺"),
+ (0x2F97A, "M", "者"),
+ (0x2F97B, "M", "𦓚"),
+ (0x2F97C, "M", "𦔣"),
+ (0x2F97D, "M", "聠"),
+ (0x2F97E, "M", "𦖨"),
+ (0x2F97F, "M", "聰"),
+ (0x2F980, "M", "𣍟"),
+ (0x2F981, "M", "䏕"),
+ (0x2F982, "M", "育"),
+ (0x2F983, "M", "脃"),
+ (0x2F984, "M", "䐋"),
+ (0x2F985, "M", "脾"),
+ (0x2F986, "M", "媵"),
+ (0x2F987, "M", "𦞧"),
+ (0x2F988, "M", "𦞵"),
+ (0x2F989, "M", "𣎓"),
+ (0x2F98A, "M", "𣎜"),
+ (0x2F98B, "M", "舁"),
+ (0x2F98C, "M", "舄"),
+ (0x2F98D, "M", "辞"),
+ (0x2F98E, "M", "䑫"),
+ (0x2F98F, "M", "芑"),
+ (0x2F990, "M", "芋"),
+ (0x2F991, "M", "芝"),
+ (0x2F992, "M", "劳"),
+ (0x2F993, "M", "花"),
+ (0x2F994, "M", "芳"),
+ (0x2F995, "M", "芽"),
+ (0x2F996, "M", "苦"),
+ (0x2F997, "M", "𦬼"),
+ (0x2F998, "M", "若"),
+ (0x2F999, "M", "茝"),
+ (0x2F99A, "M", "荣"),
+ (0x2F99B, "M", "莭"),
+ (0x2F99C, "M", "茣"),
+ (0x2F99D, "M", "莽"),
+ (0x2F99E, "M", "菧"),
+ (0x2F99F, "M", "著"),
+ (0x2F9A0, "M", "荓"),
+ (0x2F9A1, "M", "菊"),
+ (0x2F9A2, "M", "菌"),
+ (0x2F9A3, "M", "菜"),
+ (0x2F9A4, "M", "𦰶"),
+ (0x2F9A5, "M", "𦵫"),
+ (0x2F9A6, "M", "𦳕"),
+ (0x2F9A7, "M", "䔫"),
+ ]
+
+
+def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2F9A8, "M", "蓱"),
+ (0x2F9A9, "M", "蓳"),
+ (0x2F9AA, "M", "蔖"),
+ (0x2F9AB, "M", "𧏊"),
+ (0x2F9AC, "M", "蕤"),
+ (0x2F9AD, "M", "𦼬"),
+ (0x2F9AE, "M", "䕝"),
+ (0x2F9AF, "M", "䕡"),
+ (0x2F9B0, "M", "𦾱"),
+ (0x2F9B1, "M", "𧃒"),
+ (0x2F9B2, "M", "䕫"),
+ (0x2F9B3, "M", "虐"),
+ (0x2F9B4, "M", "虜"),
+ (0x2F9B5, "M", "虧"),
+ (0x2F9B6, "M", "虩"),
+ (0x2F9B7, "M", "蚩"),
+ (0x2F9B8, "M", "蚈"),
+ (0x2F9B9, "M", "蜎"),
+ (0x2F9BA, "M", "蛢"),
+ (0x2F9BB, "M", "蝹"),
+ (0x2F9BC, "M", "蜨"),
+ (0x2F9BD, "M", "蝫"),
+ (0x2F9BE, "M", "螆"),
+ (0x2F9BF, "M", "䗗"),
+ (0x2F9C0, "M", "蟡"),
+ (0x2F9C1, "M", "蠁"),
+ (0x2F9C2, "M", "䗹"),
+ (0x2F9C3, "M", "衠"),
+ (0x2F9C4, "M", "衣"),
+ (0x2F9C5, "M", "𧙧"),
+ (0x2F9C6, "M", "裗"),
+ (0x2F9C7, "M", "裞"),
+ (0x2F9C8, "M", "䘵"),
+ (0x2F9C9, "M", "裺"),
+ (0x2F9CA, "M", "㒻"),
+ (0x2F9CB, "M", "𧢮"),
+ (0x2F9CC, "M", "𧥦"),
+ (0x2F9CD, "M", "䚾"),
+ (0x2F9CE, "M", "䛇"),
+ (0x2F9CF, "M", "誠"),
+ (0x2F9D0, "M", "諭"),
+ (0x2F9D1, "M", "變"),
+ (0x2F9D2, "M", "豕"),
+ (0x2F9D3, "M", "𧲨"),
+ (0x2F9D4, "M", "貫"),
+ (0x2F9D5, "M", "賁"),
+ (0x2F9D6, "M", "贛"),
+ (0x2F9D7, "M", "起"),
+ (0x2F9D8, "M", "𧼯"),
+ (0x2F9D9, "M", "𠠄"),
+ (0x2F9DA, "M", "跋"),
+ (0x2F9DB, "M", "趼"),
+ (0x2F9DC, "M", "跰"),
+ (0x2F9DD, "M", "𠣞"),
+ (0x2F9DE, "M", "軔"),
+ (0x2F9DF, "M", "輸"),
+ (0x2F9E0, "M", "𨗒"),
+ (0x2F9E1, "M", "𨗭"),
+ (0x2F9E2, "M", "邔"),
+ (0x2F9E3, "M", "郱"),
+ (0x2F9E4, "M", "鄑"),
+ (0x2F9E5, "M", "𨜮"),
+ (0x2F9E6, "M", "鄛"),
+ (0x2F9E7, "M", "鈸"),
+ (0x2F9E8, "M", "鋗"),
+ (0x2F9E9, "M", "鋘"),
+ (0x2F9EA, "M", "鉼"),
+ (0x2F9EB, "M", "鏹"),
+ (0x2F9EC, "M", "鐕"),
+ (0x2F9ED, "M", "𨯺"),
+ (0x2F9EE, "M", "開"),
+ (0x2F9EF, "M", "䦕"),
+ (0x2F9F0, "M", "閷"),
+ (0x2F9F1, "M", "𨵷"),
+ (0x2F9F2, "M", "䧦"),
+ (0x2F9F3, "M", "雃"),
+ (0x2F9F4, "M", "嶲"),
+ (0x2F9F5, "M", "霣"),
+ (0x2F9F6, "M", "𩅅"),
+ (0x2F9F7, "M", "𩈚"),
+ (0x2F9F8, "M", "䩮"),
+ (0x2F9F9, "M", "䩶"),
+ (0x2F9FA, "M", "韠"),
+ (0x2F9FB, "M", "𩐊"),
+ (0x2F9FC, "M", "䪲"),
+ (0x2F9FD, "M", "𩒖"),
+ (0x2F9FE, "M", "頋"),
+ (0x2FA00, "M", "頩"),
+ (0x2FA01, "M", "𩖶"),
+ (0x2FA02, "M", "飢"),
+ (0x2FA03, "M", "䬳"),
+ (0x2FA04, "M", "餩"),
+ (0x2FA05, "M", "馧"),
+ (0x2FA06, "M", "駂"),
+ (0x2FA07, "M", "駾"),
+ (0x2FA08, "M", "䯎"),
+ (0x2FA09, "M", "𩬰"),
+ (0x2FA0A, "M", "鬒"),
+ (0x2FA0B, "M", "鱀"),
+ (0x2FA0C, "M", "鳽"),
+ ]
+
+
+def _seg_83() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2FA0D, "M", "䳎"),
+ (0x2FA0E, "M", "䳭"),
+ (0x2FA0F, "M", "鵧"),
+ (0x2FA10, "M", "𪃎"),
+ (0x2FA11, "M", "䳸"),
+ (0x2FA12, "M", "𪄅"),
+ (0x2FA13, "M", "𪈎"),
+ (0x2FA14, "M", "𪊑"),
+ (0x2FA15, "M", "麻"),
+ (0x2FA16, "M", "䵖"),
+ (0x2FA17, "M", "黹"),
+ (0x2FA18, "M", "黾"),
+ (0x2FA19, "M", "鼅"),
+ (0x2FA1A, "M", "鼏"),
+ (0x2FA1B, "M", "鼖"),
+ (0x2FA1C, "M", "鼻"),
+ (0x2FA1D, "M", "𪘀"),
+ (0x2FA1E, "X"),
+ (0x30000, "V"),
+ (0x3134B, "X"),
+ (0x31350, "V"),
+ (0x323B0, "X"),
+ (0xE0100, "I"),
+ (0xE01F0, "X"),
+ ]
+
+
+uts46data = tuple(
+ _seg_0()
+ + _seg_1()
+ + _seg_2()
+ + _seg_3()
+ + _seg_4()
+ + _seg_5()
+ + _seg_6()
+ + _seg_7()
+ + _seg_8()
+ + _seg_9()
+ + _seg_10()
+ + _seg_11()
+ + _seg_12()
+ + _seg_13()
+ + _seg_14()
+ + _seg_15()
+ + _seg_16()
+ + _seg_17()
+ + _seg_18()
+ + _seg_19()
+ + _seg_20()
+ + _seg_21()
+ + _seg_22()
+ + _seg_23()
+ + _seg_24()
+ + _seg_25()
+ + _seg_26()
+ + _seg_27()
+ + _seg_28()
+ + _seg_29()
+ + _seg_30()
+ + _seg_31()
+ + _seg_32()
+ + _seg_33()
+ + _seg_34()
+ + _seg_35()
+ + _seg_36()
+ + _seg_37()
+ + _seg_38()
+ + _seg_39()
+ + _seg_40()
+ + _seg_41()
+ + _seg_42()
+ + _seg_43()
+ + _seg_44()
+ + _seg_45()
+ + _seg_46()
+ + _seg_47()
+ + _seg_48()
+ + _seg_49()
+ + _seg_50()
+ + _seg_51()
+ + _seg_52()
+ + _seg_53()
+ + _seg_54()
+ + _seg_55()
+ + _seg_56()
+ + _seg_57()
+ + _seg_58()
+ + _seg_59()
+ + _seg_60()
+ + _seg_61()
+ + _seg_62()
+ + _seg_63()
+ + _seg_64()
+ + _seg_65()
+ + _seg_66()
+ + _seg_67()
+ + _seg_68()
+ + _seg_69()
+ + _seg_70()
+ + _seg_71()
+ + _seg_72()
+ + _seg_73()
+ + _seg_74()
+ + _seg_75()
+ + _seg_76()
+ + _seg_77()
+ + _seg_78()
+ + _seg_79()
+ + _seg_80()
+ + _seg_81()
+ + _seg_82()
+ + _seg_83()
+) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/METADATA"
new file mode 100644
index 0000000..1ab8dd6
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/METADATA"
@@ -0,0 +1,149 @@
+Metadata-Version: 2.4
+Name: multidict
+Version: 6.7.0
+Summary: multidict implementation
+Home-page: https://github.com/aio-libs/multidict
+Author: Andrew Svetlov
+Author-email: andrew.svetlov@gmail.com
+License: Apache License 2.0
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions
+Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict
+Project-URL: Docs: Changelog, https://multidict.aio-libs.org/en/latest/changes/
+Project-URL: Docs: RTD, https://multidict.aio-libs.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/multidict
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: typing-extensions>=4.1.0; python_version < "3.11"
+Dynamic: license-file
+
+=========
+multidict
+=========
+
+.. image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg
+ :target: https://github.com/aio-libs/multidict/actions
+ :alt: GitHub status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg?flag=pytest
+ :target: https://codecov.io/gh/aio-libs/multidict?flags[]=pytest
+ :alt: Coverage metrics
+
+.. image:: https://img.shields.io/pypi/v/multidict.svg
+ :target: https://pypi.org/project/multidict
+ :alt: PyPI
+
+.. image:: https://readthedocs.org/projects/multidict/badge/?version=latest
+ :target: https://multidict.aio-libs.org
+ :alt: Read The Docs build status badge
+
+.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json
+ :target: https://codspeed.io/aio-libs/multidict
+ :alt: CodSpeed
+
+.. image:: https://img.shields.io/pypi/pyversions/multidict.svg
+ :target: https://pypi.org/project/multidict
+ :alt: Python versions
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Multidict is dict-like collection of *key-value pairs* where key
+might occur more than once in the container.
+
+Introduction
+------------
+
+*HTTP Headers* and *URL query string* require specific data structure:
+*multidict*. It behaves mostly like a regular ``dict`` but it may have
+several *values* for the same *key* and *preserves insertion ordering*.
+
+The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries).
+
+``multidict`` has four multidict classes:
+``MultiDict``, ``MultiDictProxy``, ``CIMultiDict``
+and ``CIMultiDictProxy``.
+
+Immutable proxies (``MultiDictProxy`` and
+``CIMultiDictProxy``) provide a dynamic view for the
+proxied multidict, the view reflects underlying collection changes. They
+implement the ``collections.abc.Mapping`` interface.
+
+Regular mutable (``MultiDict`` and ``CIMultiDict``) classes
+implement ``collections.abc.MutableMapping`` and allows them to change
+their own content.
+
+
+*Case insensitive* (``CIMultiDict`` and
+``CIMultiDictProxy``) assume the *keys* are case
+insensitive, e.g.::
+
+ >>> dct = CIMultiDict(key='val')
+ >>> 'Key' in dct
+ True
+ >>> dct['Key']
+ 'val'
+
+*Keys* should be ``str`` or ``istr`` instances.
+
+The library has optional C Extensions for speed.
+
+
+License
+-------
+
+Apache 2
+
+Library Installation
+--------------------
+
+.. code-block:: bash
+
+ $ pip install multidict
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the
+tarball will be used to compile the library from source. It requires a C compiler and
+Python headers to be installed.
+
+To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable,
+e.g.:
+
+.. code-block:: bash
+
+ $ MULTIDICT_NO_EXTENSIONS=1 pip install multidict
+
+Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on
+the usage scenario!!!
+
+For extension development, set the ``MULTIDICT_DEBUG_BUILD`` environment variable to compile
+the extensions in debug mode:
+
+.. code-block:: console
+
+ $ MULTIDICT_DEBUG_BUILD=1 pip install multidict
+
+Changelog
+---------
+See `RTD page <http://multidict.aio-libs.org/en/latest/changes>`_.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/RECORD"
new file mode 100644
index 0000000..c695fca
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/RECORD"
@@ -0,0 +1,16 @@
+multidict-6.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+multidict-6.7.0.dist-info/METADATA,sha256=Xw8Ehw9kFzGLwsuOXDXIEO2VNqyHmIldPFNMfTgPn7k,5470
+multidict-6.7.0.dist-info/RECORD,,
+multidict-6.7.0.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101
+multidict-6.7.0.dist-info/licenses/LICENSE,sha256=k9Ealo4vDzY3PECBH_bSDhc_WMPKtYhM1mF7v9eVSSo,611
+multidict-6.7.0.dist-info/top_level.txt,sha256=-euDElkk5_qkmfIJ7WiqCab02ZlSFZWynejKg59qZQQ,10
+multidict/__init__.py,sha256=vrqM7ruZH18zqUQumAaWtGekJFYb_oWvThnAdNuAxg4,1228
+multidict/__pycache__/__init__.cpython-312.pyc,,
+multidict/__pycache__/_abc.cpython-312.pyc,,
+multidict/__pycache__/_compat.cpython-312.pyc,,
+multidict/__pycache__/_multidict_py.cpython-312.pyc,,
+multidict/_abc.py,sha256=e_0JDJi7E6LWS0A3gUJ17SkgDLlmg8ffjfylTu_vboc,2402
+multidict/_compat.py,sha256=TcRjCStk2iIY1_DwDNj8kNpJRQ9rtLj92Xvk1z2G_ak,422
+multidict/_multidict.cp312-win_amd64.pyd,sha256=aIV_ZoTfbrwppO6ulrqT6aIF0oOHdCajZMHguZroMMA,80896
+multidict/_multidict_py.py,sha256=VGQ58P7VOd6lRf3WVAinb62aD16DPdAWRt68qmiJMXE,39955
+multidict/py.typed,sha256=e9bmbH3UFxsabQrnNFPG9qxIXztwbcM6IKDYnvZwprY,15
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/WHEEL"
new file mode 100644
index 0000000..10ac2c2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..8727172
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/licenses/LICENSE"
@@ -0,0 +1,13 @@
+ Copyright 2016 Andrew Svetlov and aio-libs contributors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/top_level.txt"
new file mode 100644
index 0000000..afcecdf
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict-6.7.0.dist-info/top_level.txt"
@@ -0,0 +1 @@
+multidict
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/__init__.py"
new file mode 100644
index 0000000..a688932
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/__init__.py"
@@ -0,0 +1,60 @@
+"""
+Multidict implementation.
+
+HTTP Headers and URL query string require specific data structure:
+multidict. It behaves mostly like a dict but it can have
+several values for the same key.
+"""
+
+from typing import TYPE_CHECKING
+
+from ._abc import MultiMapping, MutableMultiMapping
+from ._compat import USE_EXTENSIONS
+
+__all__ = (
+ "CIMultiDict",
+ "CIMultiDictProxy",
+ "MultiDict",
+ "MultiDictProxy",
+ "MultiMapping",
+ "MutableMultiMapping",
+ "getversion",
+ "istr",
+ "upstr",
+)
+
+__version__ = "6.7.0"
+
+
+if TYPE_CHECKING or not USE_EXTENSIONS:
+ from ._multidict_py import (
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ getversion,
+ istr,
+ )
+else:
+ from collections.abc import ItemsView, KeysView, ValuesView
+
+ from ._multidict import (
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ _ItemsView,
+ _KeysView,
+ _ValuesView,
+ getversion,
+ istr,
+ )
+
+ MultiMapping.register(MultiDictProxy)
+ MutableMultiMapping.register(MultiDict)
+ KeysView.register(_KeysView)
+ ItemsView.register(_ItemsView)
+ ValuesView.register(_ValuesView)
+
+
+upstr = istr
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_abc.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_abc.py"
new file mode 100644
index 0000000..54253e9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_abc.py"
@@ -0,0 +1,73 @@
+import abc
+from collections.abc import Iterable, Mapping, MutableMapping
+from typing import TYPE_CHECKING, Protocol, TypeVar, Union, overload
+
+if TYPE_CHECKING:
+ from ._multidict_py import istr
+else:
+ istr = str
+
+_V = TypeVar("_V")
+_V_co = TypeVar("_V_co", covariant=True)
+_T = TypeVar("_T")
+
+
+class SupportsKeys(Protocol[_V_co]):
+ def keys(self) -> Iterable[str]: ...
+ def __getitem__(self, key: str, /) -> _V_co: ...
+
+
+class SupportsIKeys(Protocol[_V_co]):
+ def keys(self) -> Iterable[istr]: ...
+ def __getitem__(self, key: istr, /) -> _V_co: ...
+
+
+MDArg = Union[SupportsKeys[_V], SupportsIKeys[_V], Iterable[tuple[str, _V]], None]
+
+
+class MultiMapping(Mapping[str, _V_co]):
+ @overload
+ def getall(self, key: str) -> list[_V_co]: ...
+ @overload
+ def getall(self, key: str, default: _T) -> Union[list[_V_co], _T]: ...
+ @abc.abstractmethod
+ def getall(self, key: str, default: _T = ...) -> Union[list[_V_co], _T]:
+ """Return all values for key."""
+
+ @overload
+ def getone(self, key: str) -> _V_co: ...
+ @overload
+ def getone(self, key: str, default: _T) -> Union[_V_co, _T]: ...
+ @abc.abstractmethod
+ def getone(self, key: str, default: _T = ...) -> Union[_V_co, _T]:
+ """Return first value for key."""
+
+
+class MutableMultiMapping(MultiMapping[_V], MutableMapping[str, _V]):
+ @abc.abstractmethod
+ def add(self, key: str, value: _V) -> None:
+ """Add value to list."""
+
+ @abc.abstractmethod
+ def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
+ """Add everything from arg and kwargs to the mapping."""
+
+ @abc.abstractmethod
+ def merge(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
+ """Merge into the mapping, adding non-existing keys."""
+
+ @overload
+ def popone(self, key: str) -> _V: ...
+ @overload
+ def popone(self, key: str, default: _T) -> Union[_V, _T]: ...
+ @abc.abstractmethod
+ def popone(self, key: str, default: _T = ...) -> Union[_V, _T]:
+ """Remove specified key and return the corresponding value."""
+
+ @overload
+ def popall(self, key: str) -> list[_V]: ...
+ @overload
+ def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
+ @abc.abstractmethod
+ def popall(self, key: str, default: _T = ...) -> Union[list[_V], _T]:
+ """Remove all occurrences of key and return the list of corresponding values."""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_compat.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_compat.py"
new file mode 100644
index 0000000..264d327
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_compat.py"
@@ -0,0 +1,15 @@
+import os
+import platform
+
+NO_EXTENSIONS = bool(os.environ.get("MULTIDICT_NO_EXTENSIONS"))
+
+PYPY = platform.python_implementation() == "PyPy"
+
+USE_EXTENSIONS = not NO_EXTENSIONS and not PYPY
+
+if USE_EXTENSIONS:
+ try:
+ from . import _multidict # type: ignore[attr-defined] # noqa: F401
+ except ImportError: # pragma: no cover
+ # FIXME: Refactor for coverage. See #837.
+ USE_EXTENSIONS = False
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_multidict.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_multidict.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..312ca59
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_multidict.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_multidict_py.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_multidict_py.py"
new file mode 100644
index 0000000..6b68d52
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/_multidict_py.py"
@@ -0,0 +1,1242 @@
+import enum
+import functools
+import reprlib
+import sys
+from array import array
+from collections.abc import (
+ ItemsView,
+ Iterable,
+ Iterator,
+ KeysView,
+ Mapping,
+ ValuesView,
+)
+from dataclasses import dataclass
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ ClassVar,
+ Generic,
+ NoReturn,
+ Optional,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+
+from ._abc import MDArg, MultiMapping, MutableMultiMapping, SupportsKeys
+
+if sys.version_info >= (3, 11):
+ from typing import Self
+else:
+ from typing_extensions import Self
+
+
+class istr(str):
+ """Case insensitive str."""
+
+ __is_istr__ = True
+ __istr_identity__: Optional[str] = None
+
+
+_V = TypeVar("_V")
+_T = TypeVar("_T")
+
+_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
+sentinel = _SENTINEL.sentinel
+
+_version = array("Q", [0])
+
+
+class _Iter(Generic[_T]):
+ __slots__ = ("_size", "_iter")
+
+ def __init__(self, size: int, iterator: Iterator[_T]):
+ self._size = size
+ self._iter = iterator
+
+ def __iter__(self) -> Self:
+ return self
+
+ def __next__(self) -> _T:
+ return next(self._iter)
+
+ def __length_hint__(self) -> int:
+ return self._size
+
+
+class _ViewBase(Generic[_V]):
+ def __init__(
+ self,
+ md: "MultiDict[_V]",
+ ):
+ self._md = md
+
+ def __len__(self) -> int:
+ return len(self._md)
+
+
+class _ItemsView(_ViewBase[_V], ItemsView[str, _V]):
+ def __contains__(self, item: object) -> bool:
+ if not isinstance(item, (tuple, list)) or len(item) != 2:
+ return False
+ key, value = item
+ try:
+ identity = self._md._identity(key)
+ except TypeError:
+ return False
+ hash_ = hash(identity)
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity and value == e.value:
+ return True
+ return False
+
+ def __iter__(self) -> _Iter[tuple[str, _V]]:
+ return _Iter(len(self), self._iter(self._md._version))
+
+ def _iter(self, version: int) -> Iterator[tuple[str, _V]]:
+ for e in self._md._keys.iter_entries():
+ if version != self._md._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield self._md._key(e.key), e.value
+
+ @reprlib.recursive_repr()
+ def __repr__(self) -> str:
+ lst = []
+ for e in self._md._keys.iter_entries():
+ lst.append(f"'{e.key}': {e.value!r}")
+ body = ", ".join(lst)
+ return f"<{self.__class__.__name__}({body})>"
+
+ def _parse_item(
+ self, arg: Union[tuple[str, _V], _T]
+ ) -> Optional[tuple[int, str, str, _V]]:
+ if not isinstance(arg, tuple):
+ return None
+ if len(arg) != 2:
+ return None
+ try:
+ identity = self._md._identity(arg[0])
+ return (hash(identity), identity, arg[0], arg[1])
+ except TypeError:
+ return None
+
+ def _tmp_set(self, it: Iterable[_T]) -> set[tuple[str, _V]]:
+ tmp = set()
+ for arg in it:
+ item = self._parse_item(arg)
+ if item is None:
+ continue
+ else:
+ tmp.add((item[1], item[3]))
+ return tmp
+
+ def __and__(self, other: Iterable[Any]) -> set[tuple[str, _V]]:
+ ret = set()
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for arg in it:
+ item = self._parse_item(arg)
+ if item is None:
+ continue
+ hash_, identity, key, value = item
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ e.hash = -1
+ if e.identity == identity and e.value == value:
+ ret.add((e.key, e.value))
+ self._md._keys.restore_hash(hash_)
+ return ret
+
+ def __rand__(self, other: Iterable[_T]) -> set[_T]:
+ ret = set()
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for arg in it:
+ item = self._parse_item(arg)
+ if item is None:
+ continue
+ hash_, identity, key, value = item
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity and e.value == value:
+ ret.add(arg)
+ break
+ return ret
+
+ def __or__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]:
+ ret: set[Union[tuple[str, _V], _T]] = set(self)
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for arg in it:
+ item: Optional[tuple[int, str, str, _V]] = self._parse_item(arg)
+ if item is None:
+ ret.add(arg)
+ continue
+ hash_, identity, key, value = item
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity and e.value == value: # pragma: no branch
+ break
+ else:
+ ret.add(arg)
+ return ret
+
+ def __ror__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]:
+ try:
+ ret: set[Union[tuple[str, _V], _T]] = set(other)
+ except TypeError:
+ return NotImplemented
+ tmp = self._tmp_set(ret)
+
+ for e in self._md._keys.iter_entries():
+ if (e.identity, e.value) not in tmp:
+ ret.add((e.key, e.value))
+ return ret
+
+ def __sub__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]:
+ ret: set[Union[tuple[str, _V], _T]] = set()
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ tmp = self._tmp_set(it)
+
+ for e in self._md._keys.iter_entries():
+ if (e.identity, e.value) not in tmp:
+ ret.add((e.key, e.value))
+
+ return ret
+
+ def __rsub__(self, other: Iterable[_T]) -> set[_T]:
+ ret: set[_T] = set()
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for arg in it:
+ item = self._parse_item(arg)
+ if item is None:
+ ret.add(arg)
+ continue
+
+ hash_, identity, key, value = item
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity and e.value == value: # pragma: no branch
+ break
+ else:
+ ret.add(arg)
+ return ret
+
+ def __xor__(self, other: Iterable[_T]) -> set[Union[tuple[str, _V], _T]]:
+ try:
+ rgt = set(other)
+ except TypeError:
+ return NotImplemented
+ ret: set[Union[tuple[str, _V], _T]] = self - rgt
+ ret |= rgt - self
+ return ret
+
+ __rxor__ = __xor__
+
+ def isdisjoint(self, other: Iterable[tuple[str, _V]]) -> bool:
+ for arg in other:
+ item = self._parse_item(arg)
+ if item is None:
+ continue
+
+ hash_, identity, key, value = item
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity and e.value == value: # pragma: no branch
+ return False
+ return True
+
+
+class _ValuesView(_ViewBase[_V], ValuesView[_V]):
+ def __contains__(self, value: object) -> bool:
+ for e in self._md._keys.iter_entries():
+ if e.value == value:
+ return True
+ return False
+
+ def __iter__(self) -> _Iter[_V]:
+ return _Iter(len(self), self._iter(self._md._version))
+
+ def _iter(self, version: int) -> Iterator[_V]:
+ for e in self._md._keys.iter_entries():
+ if version != self._md._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield e.value
+
+ @reprlib.recursive_repr()
+ def __repr__(self) -> str:
+ lst = []
+ for e in self._md._keys.iter_entries():
+ lst.append(repr(e.value))
+ body = ", ".join(lst)
+ return f"<{self.__class__.__name__}({body})>"
+
+
+class _KeysView(_ViewBase[_V], KeysView[str]):
+ def __contains__(self, key: object) -> bool:
+ if not isinstance(key, str):
+ return False
+ identity = self._md._identity(key)
+ hash_ = hash(identity)
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ return True
+ return False
+
+ def __iter__(self) -> _Iter[str]:
+ return _Iter(len(self), self._iter(self._md._version))
+
+ def _iter(self, version: int) -> Iterator[str]:
+ for e in self._md._keys.iter_entries():
+ if version != self._md._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield self._md._key(e.key)
+
+ def __repr__(self) -> str:
+ lst = []
+ for e in self._md._keys.iter_entries():
+ lst.append(f"'{e.key}'")
+ body = ", ".join(lst)
+ return f"<{self.__class__.__name__}({body})>"
+
+ def __and__(self, other: Iterable[object]) -> set[str]:
+ ret = set()
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for key in it:
+ if not isinstance(key, str):
+ continue
+ identity = self._md._identity(key)
+ hash_ = hash(identity)
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ ret.add(e.key)
+ break
+ return ret
+
+ def __rand__(self, other: Iterable[_T]) -> set[_T]:
+ ret = set()
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for key in it:
+ if not isinstance(key, str):
+ continue
+ if key in self._md:
+ ret.add(key)
+ return cast(set[_T], ret)
+
+ def __or__(self, other: Iterable[_T]) -> set[Union[str, _T]]:
+ ret: set[Union[str, _T]] = set(self)
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for key in it:
+ if not isinstance(key, str):
+ ret.add(key)
+ continue
+ if key not in self._md:
+ ret.add(key)
+ return ret
+
+ def __ror__(self, other: Iterable[_T]) -> set[Union[str, _T]]:
+ try:
+ ret: set[Union[str, _T]] = set(other)
+ except TypeError:
+ return NotImplemented
+
+ tmp = set()
+ for key in ret:
+ if not isinstance(key, str):
+ continue
+ identity = self._md._identity(key)
+ tmp.add(identity)
+
+ for e in self._md._keys.iter_entries():
+ if e.identity not in tmp:
+ ret.add(e.key)
+ return ret
+
+ def __sub__(self, other: Iterable[object]) -> set[str]:
+ ret = set(self)
+ try:
+ it = iter(other)
+ except TypeError:
+ return NotImplemented
+ for key in it:
+ if not isinstance(key, str):
+ continue
+ identity = self._md._identity(key)
+ hash_ = hash(identity)
+ for slot, idx, e in self._md._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ ret.discard(e.key)
+ break
+ return ret
+
+ def __rsub__(self, other: Iterable[_T]) -> set[_T]:
+ try:
+ ret: set[_T] = set(other)
+ except TypeError:
+ return NotImplemented
+ for key in other:
+ if not isinstance(key, str):
+ continue
+ if key in self._md:
+ ret.discard(key) # type: ignore[arg-type]
+ return ret
+
+ def __xor__(self, other: Iterable[_T]) -> set[Union[str, _T]]:
+ try:
+ rgt = set(other)
+ except TypeError:
+ return NotImplemented
+ ret: set[Union[str, _T]] = self - rgt # type: ignore[assignment]
+ ret |= rgt - self
+ return ret
+
+ __rxor__ = __xor__
+
+ def isdisjoint(self, other: Iterable[object]) -> bool:
+ for key in other:
+ if not isinstance(key, str):
+ continue
+ if key in self._md:
+ return False
+ return True
+
+
+class _CSMixin:
+ _ci: ClassVar[bool] = False
+
+ def _key(self, key: str) -> str:
+ return key
+
+ def _identity(self, key: str) -> str:
+ if isinstance(key, str):
+ return key
+ else:
+ raise TypeError("MultiDict keys should be either str or subclasses of str")
+
+
+class _CIMixin:
+ _ci: ClassVar[bool] = True
+
+ def _key(self, key: str) -> str:
+ if type(key) is istr:
+ return key
+ else:
+ return istr(key)
+
+ def _identity(self, key: str) -> str:
+ if isinstance(key, istr):
+ ret = key.__istr_identity__
+ if ret is None:
+ ret = key.lower()
+ key.__istr_identity__ = ret
+ return ret
+ if isinstance(key, str):
+ return key.lower()
+ else:
+ raise TypeError("MultiDict keys should be either str or subclasses of str")
+
+
+def estimate_log2_keysize(n: int) -> int:
+ # 7 == HT_MINSIZE - 1
+ return (((n * 3 + 1) // 2) | 7).bit_length()
+
+
+@dataclass
+class _Entry(Generic[_V]):
+ hash: int
+ identity: str
+ key: str
+ value: _V
+
+
+@dataclass
+class _HtKeys(Generic[_V]): # type: ignore[misc]
+ LOG_MINSIZE: ClassVar[int] = 3
+ MINSIZE: ClassVar[int] = 8
+ PREALLOCATED_INDICES: ClassVar[dict[int, array]] = { # type: ignore[type-arg]
+ log2_size: array(
+ "b" if log2_size < 8 else "h", (-1 for i in range(1 << log2_size))
+ )
+ for log2_size in range(3, 10)
+ }
+
+ log2_size: int
+ usable: int
+
+ indices: array # type: ignore[type-arg] # in py3.9 array is not generic
+ entries: list[Optional[_Entry[_V]]]
+
+ @functools.cached_property
+ def nslots(self) -> int:
+ return 1 << self.log2_size
+
+ @functools.cached_property
+ def mask(self) -> int:
+ return self.nslots - 1
+
+ if sys.implementation.name != "pypy":
+
+ def __sizeof__(self) -> int:
+ return (
+ object.__sizeof__(self)
+ + sys.getsizeof(self.indices)
+ + sys.getsizeof(self.entries)
+ )
+
+ @classmethod
+ def new(cls, log2_size: int, entries: list[Optional[_Entry[_V]]]) -> Self:
+ size = 1 << log2_size
+ usable = (size << 1) // 3
+ if log2_size < 10:
+ indices = cls.PREALLOCATED_INDICES[log2_size].__copy__()
+ elif log2_size < 16:
+ indices = array("h", (-1 for i in range(size)))
+ elif log2_size < 32:
+ indices = array("l", (-1 for i in range(size)))
+ else: # pragma: no cover # don't test huge multidicts
+ indices = array("q", (-1 for i in range(size)))
+ ret = cls(
+ log2_size=log2_size,
+ usable=usable,
+ indices=indices,
+ entries=entries,
+ )
+ return ret
+
+ def clone(self) -> "_HtKeys[_V]":
+ entries = [
+ _Entry(e.hash, e.identity, e.key, e.value) if e is not None else None
+ for e in self.entries
+ ]
+
+ return _HtKeys(
+ log2_size=self.log2_size,
+ usable=self.usable,
+ indices=self.indices.__copy__(),
+ entries=entries,
+ )
+
+ def build_indices(self, update: bool) -> None:
+ mask = self.mask
+ indices = self.indices
+ for idx, e in enumerate(self.entries):
+ assert e is not None
+ hash_ = e.hash
+ if update:
+ if hash_ == -1:
+ hash_ = hash(e.identity)
+ else:
+ assert hash_ != -1
+ i = hash_ & mask
+ perturb = hash_ & sys.maxsize
+ while indices[i] != -1:
+ perturb >>= 5
+ i = mask & (i * 5 + perturb + 1)
+ indices[i] = idx
+
+ def find_empty_slot(self, hash_: int) -> int:
+ mask = self.mask
+ indices = self.indices
+ i = hash_ & mask
+ perturb = hash_ & sys.maxsize
+ ix = indices[i]
+ while ix != -1:
+ perturb >>= 5
+ i = (i * 5 + perturb + 1) & mask
+ ix = indices[i]
+ return i
+
+ def iter_hash(self, hash_: int) -> Iterator[tuple[int, int, _Entry[_V]]]:
+ mask = self.mask
+ indices = self.indices
+ entries = self.entries
+ i = hash_ & mask
+ perturb = hash_ & sys.maxsize
+ ix = indices[i]
+ while ix != -1:
+ if ix != -2:
+ e = entries[ix]
+ if e.hash == hash_:
+ yield i, ix, e
+ perturb >>= 5
+ i = (i * 5 + perturb + 1) & mask
+ ix = indices[i]
+
+ def del_idx(self, hash_: int, idx: int) -> None:
+ mask = self.mask
+ indices = self.indices
+ i = hash_ & mask
+ perturb = hash_ & sys.maxsize
+ ix = indices[i]
+ while ix != idx:
+ perturb >>= 5
+ i = (i * 5 + perturb + 1) & mask
+ ix = indices[i]
+ indices[i] = -2
+
+ def iter_entries(self) -> Iterator[_Entry[_V]]:
+ return filter(None, self.entries)
+
+ def restore_hash(self, hash_: int) -> None:
+ mask = self.mask
+ indices = self.indices
+ entries = self.entries
+ i = hash_ & mask
+ perturb = hash_ & sys.maxsize
+ ix = indices[i]
+ while ix != -1:
+ if ix != -2:
+ entry = entries[ix]
+ if entry.hash == -1:
+ entry.hash = hash_
+ perturb >>= 5
+ i = (i * 5 + perturb + 1) & mask
+ ix = indices[i]
+
+
+class MultiDict(_CSMixin, MutableMultiMapping[_V]):
+ """Dictionary with the support for duplicate keys."""
+
+ __slots__ = ("_keys", "_used", "_version")
+
+ def __init__(self, arg: MDArg[_V] = None, /, **kwargs: _V):
+ self._used = 0
+ v = _version
+ v[0] += 1
+ self._version = v[0]
+ if not kwargs:
+ md = None
+ if isinstance(arg, MultiDictProxy):
+ md = arg._md
+ elif isinstance(arg, MultiDict):
+ md = arg
+ if md is not None and md._ci is self._ci:
+ self._from_md(md)
+ return
+
+ it = self._parse_args(arg, kwargs)
+ log2_size = estimate_log2_keysize(cast(int, next(it)))
+ if log2_size > 17: # pragma: no cover
+ # Don't overallocate really huge keys space in init
+ log2_size = 17
+ self._keys: _HtKeys[_V] = _HtKeys.new(log2_size, [])
+ self._extend_items(cast(Iterator[_Entry[_V]], it))
+
+ def _from_md(self, md: "MultiDict[_V]") -> None:
+ # Copy everything as-is without compacting the new multidict,
+ # otherwise it requires reindexing
+ self._keys = md._keys.clone()
+ self._used = md._used
+
+ @overload
+ def getall(self, key: str) -> list[_V]: ...
+ @overload
+ def getall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
+ def getall(
+ self, key: str, default: Union[_T, _SENTINEL] = sentinel
+ ) -> Union[list[_V], _T]:
+ """Return a list of all values matching the key."""
+ identity = self._identity(key)
+ hash_ = hash(identity)
+ res = []
+ restore = []
+ for slot, idx, e in self._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ res.append(e.value)
+ e.hash = -1
+ restore.append(idx)
+
+ if res:
+ entries = self._keys.entries
+ for idx in restore:
+ entries[idx].hash = hash_ # type: ignore[union-attr]
+ return res
+ if not res and default is not sentinel:
+ return default
+ raise KeyError("Key not found: %r" % key)
+
+ @overload
+ def getone(self, key: str) -> _V: ...
+ @overload
+ def getone(self, key: str, default: _T) -> Union[_V, _T]: ...
+ def getone(
+ self, key: str, default: Union[_T, _SENTINEL] = sentinel
+ ) -> Union[_V, _T]:
+ """Get first value matching the key.
+
+ Raises KeyError if the key is not found and no default is provided.
+ """
+ identity = self._identity(key)
+ hash_ = hash(identity)
+ for slot, idx, e in self._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ return e.value
+ if default is not sentinel:
+ return default
+ raise KeyError("Key not found: %r" % key)
+
+ # Mapping interface #
+
+ def __getitem__(self, key: str) -> _V:
+ return self.getone(key)
+
+ @overload
+ def get(self, key: str, /) -> Union[_V, None]: ...
+ @overload
+ def get(self, key: str, /, default: _T) -> Union[_V, _T]: ...
+ def get(self, key: str, default: Union[_T, None] = None) -> Union[_V, _T, None]:
+ """Get first value matching the key.
+
+ If the key is not found, returns the default (or None if no default is provided)
+ """
+ return self.getone(key, default)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self.keys())
+
+ def __len__(self) -> int:
+ return self._used
+
+ def keys(self) -> KeysView[str]:
+ """Return a new view of the dictionary's keys."""
+ return _KeysView(self)
+
+ def items(self) -> ItemsView[str, _V]:
+ """Return a new view of the dictionary's items *(key, value) pairs)."""
+ return _ItemsView(self)
+
+ def values(self) -> _ValuesView[_V]:
+ """Return a new view of the dictionary's values."""
+ return _ValuesView(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Mapping):
+ return NotImplemented
+ if isinstance(other, MultiDictProxy):
+ return self == other._md
+ if isinstance(other, MultiDict):
+ lft = self._keys
+ rht = other._keys
+ if self._used != other._used:
+ return False
+ for e1, e2 in zip(lft.iter_entries(), rht.iter_entries()):
+ if e1.identity != e2.identity or e1.value != e2.value:
+ return False
+ return True
+ if self._used != len(other):
+ return False
+ for k, v in self.items():
+ nv = other.get(k, sentinel)
+ if v != nv:
+ return False
+ return True
+
+ def __contains__(self, key: object) -> bool:
+ if not isinstance(key, str):
+ return False
+ identity = self._identity(key)
+ hash_ = hash(identity)
+ for slot, idx, e in self._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ return True
+ return False
+
+ @reprlib.recursive_repr()
+ def __repr__(self) -> str:
+ body = ", ".join(f"'{e.key}': {e.value!r}" for e in self._keys.iter_entries())
+ return f"<{self.__class__.__name__}({body})>"
+
+ if sys.implementation.name != "pypy":
+
+ def __sizeof__(self) -> int:
+ return object.__sizeof__(self) + sys.getsizeof(self._keys)
+
+ def __reduce__(self) -> tuple[type[Self], tuple[list[tuple[str, _V]]]]:
+ return (self.__class__, (list(self.items()),))
+
+ def add(self, key: str, value: _V) -> None:
+ identity = self._identity(key)
+ hash_ = hash(identity)
+ self._add_with_hash(_Entry(hash_, identity, key, value))
+ self._incr_version()
+
+ def copy(self) -> Self:
+ """Return a copy of itself."""
+ cls = self.__class__
+ return cls(self)
+
+ __copy__ = copy
+
+ def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
+ """Extend current MultiDict with more values.
+
+ This method must be used instead of update.
+ """
+ it = self._parse_args(arg, kwargs)
+ newsize = self._used + cast(int, next(it))
+ self._resize(estimate_log2_keysize(newsize), False)
+ self._extend_items(cast(Iterator[_Entry[_V]], it))
+
+ def _parse_args(
+ self,
+ arg: MDArg[_V],
+ kwargs: Mapping[str, _V],
+ ) -> Iterator[Union[int, _Entry[_V]]]:
+ identity_func = self._identity
+ if arg:
+ if isinstance(arg, MultiDictProxy):
+ arg = arg._md
+ if isinstance(arg, MultiDict):
+ yield len(arg) + len(kwargs)
+ if self._ci is not arg._ci:
+ for e in arg._keys.iter_entries():
+ identity = identity_func(e.key)
+ yield _Entry(hash(identity), identity, e.key, e.value)
+ else:
+ for e in arg._keys.iter_entries():
+ yield _Entry(e.hash, e.identity, e.key, e.value)
+ if kwargs:
+ for key, value in kwargs.items():
+ identity = identity_func(key)
+ yield _Entry(hash(identity), identity, key, value)
+ else:
+ if hasattr(arg, "keys"):
+ arg = cast(SupportsKeys[_V], arg)
+ arg = [(k, arg[k]) for k in arg.keys()]
+ if kwargs:
+ arg = list(arg)
+ arg.extend(list(kwargs.items()))
+ try:
+ yield len(arg) + len(kwargs) # type: ignore[arg-type]
+ except TypeError:
+ yield 0
+ for pos, item in enumerate(arg):
+ if not len(item) == 2:
+ raise ValueError(
+ f"multidict update sequence element #{pos}"
+ f"has length {len(item)}; 2 is required"
+ )
+ identity = identity_func(item[0])
+ yield _Entry(hash(identity), identity, item[0], item[1])
+ else:
+ yield len(kwargs)
+ for key, value in kwargs.items():
+ identity = identity_func(key)
+ yield _Entry(hash(identity), identity, key, value)
+
+ def _extend_items(self, items: Iterable[_Entry[_V]]) -> None:
+ for e in items:
+ self._add_with_hash(e)
+ self._incr_version()
+
+ def clear(self) -> None:
+ """Remove all items from MultiDict."""
+ self._used = 0
+ self._keys = _HtKeys.new(_HtKeys.LOG_MINSIZE, [])
+ self._incr_version()
+
+ # Mapping interface #
+
+ def __setitem__(self, key: str, value: _V) -> None:
+ identity = self._identity(key)
+ hash_ = hash(identity)
+ found = False
+
+ for slot, idx, e in self._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ if not found:
+ e.key = key
+ e.value = value
+ e.hash = -1
+ found = True
+ self._incr_version()
+ elif e.hash != -1: # pragma: no branch
+ self._del_at(slot, idx)
+
+ if not found:
+ self._add_with_hash(_Entry(hash_, identity, key, value))
+ else:
+ self._keys.restore_hash(hash_)
+
+ def __delitem__(self, key: str) -> None:
+ found = False
+ identity = self._identity(key)
+ hash_ = hash(identity)
+ for slot, idx, e in self._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ self._del_at(slot, idx)
+ found = True
+ if not found:
+ raise KeyError(key)
+ else:
+ self._incr_version()
+
    @overload
    def setdefault(
        self: "MultiDict[Union[_T, None]]", key: str, default: None = None
    ) -> Union[_T, None]: ...
    @overload
    def setdefault(self, key: str, default: _V) -> _V: ...
    def setdefault(self, key: str, default: Union[_V, None] = None) -> Union[_V, None]:  # type: ignore[misc]
        """Return value for key, set value to default if key is not present."""
        identity = self._identity(key)
        hash_ = hash(identity)
        # Return the first existing value for the key, if any.
        for slot, idx, e in self._keys.iter_hash(hash_):
            if e.identity == identity:  # pragma: no branch
                return e.value
        # Key is absent: insert the default and hand it back.
        self.add(key, default)  # type: ignore[arg-type]
        return default
+
    @overload
    def popone(self, key: str) -> _V: ...
    @overload
    def popone(self, key: str, default: _T) -> Union[_V, _T]: ...
    def popone(
        self, key: str, default: Union[_T, _SENTINEL] = sentinel
    ) -> Union[_V, _T]:
        """Remove specified key and return the corresponding value.

        If key is not found, d is returned if given, otherwise
        KeyError is raised.

        """
        identity = self._identity(key)
        hash_ = hash(identity)
        for slot, idx, e in self._keys.iter_hash(hash_):
            if e.identity == identity:  # pragma: no branch
                # Remove only the first matching entry.
                value = e.value
                self._del_at(slot, idx)
                self._incr_version()
                return value
        # Nothing matched: fall back to the default or raise.
        if default is sentinel:
            raise KeyError(key)
        else:
            return default

    # Type checking will inherit signature for pop() if we don't confuse it here.
    if not TYPE_CHECKING:
        pop = popone
+
    @overload
    def popall(self, key: str) -> list[_V]: ...
    @overload
    def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
    def popall(
        self, key: str, default: Union[_T, _SENTINEL] = sentinel
    ) -> Union[list[_V], _T]:
        """Remove all occurrences of key and return the list of corresponding
        values.

        If key is not found, default is returned if given, otherwise
        KeyError is raised.

        """
        found = False
        identity = self._identity(key)
        hash_ = hash(identity)
        ret = []
        # Collect and delete every entry for the key in insertion order.
        for slot, idx, e in self._keys.iter_hash(hash_):
            if e.identity == identity:  # pragma: no branch
                found = True
                ret.append(e.value)
                self._del_at(slot, idx)
                self._incr_version()

        if not found:
            if default is sentinel:
                raise KeyError(key)
            else:
                return default
        else:
            return ret
+
    def popitem(self) -> tuple[str, _V]:
        """Remove and return an arbitrary (key, value) pair."""
        if self._used <= 0:
            raise KeyError("empty multidict")

        # The entries list may end with None holes left by earlier deletions;
        # keep popping from the tail until a live entry is found.
        pos = len(self._keys.entries) - 1
        entry = self._keys.entries.pop()

        while entry is None:
            pos -= 1
            entry = self._keys.entries.pop()

        ret = self._key(entry.key), entry.value
        # Drop the index-table reference to the popped entry as well.
        self._keys.del_idx(entry.hash, pos)
        self._used -= 1
        self._incr_version()
        return ret
+
    def update(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
        """Update the dictionary, overwriting existing keys."""
        it = self._parse_args(arg, kwargs)
        # The parser yields the estimated number of incoming items first.
        newsize = self._used + cast(int, next(it))
        log2_size = estimate_log2_keysize(newsize)
        if log2_size > 17:  # pragma: no cover
            # Don't overallocate a really huge keys space in update;
            # duplicate keys could reduce the resulting amount of entries.
            log2_size = 17
        if log2_size > self._keys.log2_size:
            self._resize(log2_size, False)
        try:
            self._update_items(cast(Iterator[_Entry[_V]], it))
        finally:
            # Always sweep dead entries and restore temporary -1 hashes,
            # even if the incoming iterable raised part-way through.
            self._post_update()
+
    def _update_items(self, items: Iterator[_Entry[_V]]) -> None:
        # For each incoming entry: overwrite the first existing entry with the
        # same key (marking it with hash -1 so it is not matched again), mark
        # any further duplicates as dead, or append when the key is new.
        # _post_update() later removes the dead entries and restores hashes.
        for entry in items:
            found = False
            hash_ = entry.hash
            identity = entry.identity
            for slot, idx, e in self._keys.iter_hash(hash_):
                if e.identity == identity:  # pragma: no branch
                    if not found:
                        found = True
                        e.key = entry.key
                        e.value = entry.value
                        e.hash = -1
                    else:
                        # Duplicate of an already-updated key: mark it dead.
                        self._del_at_for_upd(e)
            if not found:
                self._add_with_hash_for_upd(entry)
+
    def _post_update(self) -> None:
        # Finish a bulk update()/merge(): physically remove entries marked
        # dead during the pass (key set to None) and restore the real hash of
        # entries that were temporarily flagged with -1.
        keys = self._keys
        indices = keys.indices
        entries = keys.entries
        for slot in range(keys.nslots):
            idx = indices[slot]
            if idx >= 0:
                e2 = entries[idx]
                assert e2 is not None
                if e2.key is None:
                    # Killed by _del_at_for_upd(): tombstone slot and entry.
                    entries[idx] = None
                    indices[slot] = -2
                    self._used -= 1
                if e2.hash == -1:
                    e2.hash = hash(e2.identity)

        self._incr_version()
+
    def merge(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
        """Merge into the dictionary, adding non-existing keys."""
        it = self._parse_args(arg, kwargs)
        # The parser yields the estimated number of incoming items first.
        newsize = self._used + cast(int, next(it))
        log2_size = estimate_log2_keysize(newsize)
        if log2_size > 17:  # pragma: no cover
            # Don't overallocate a really huge keys space in merge;
            # duplicate keys could reduce the resulting amount of entries.
            log2_size = 17
        if log2_size > self._keys.log2_size:
            self._resize(log2_size, False)
        try:
            self._merge_items(cast(Iterator[_Entry[_V]], it))
        finally:
            # Always sweep, even if the incoming iterable raised mid-way.
            self._post_update()
+
+ def _merge_items(self, items: Iterator[_Entry[_V]]) -> None:
+ for entry in items:
+ hash_ = entry.hash
+ identity = entry.identity
+ for slot, idx, e in self._keys.iter_hash(hash_):
+ if e.identity == identity: # pragma: no branch
+ break
+ else:
+ self._add_with_hash_for_upd(entry)
+
+ def _incr_version(self) -> None:
+ v = _version
+ v[0] += 1
+ self._version = v[0]
+
    def _resize(self, log2_newsize: int, update: bool) -> None:
        # Rebuild the hash table with 2**log2_newsize slots, first compacting
        # out the None holes left in the entries list by deletions.
        oldkeys = self._keys
        newentries = self._used

        if len(oldkeys.entries) == newentries:
            # No holes: reuse the entries list as-is.
            entries = oldkeys.entries
        else:
            entries = [e for e in oldkeys.entries if e is not None]
        newkeys: _HtKeys[_V] = _HtKeys.new(log2_newsize, entries)
        newkeys.usable -= newentries
        # NOTE(review): the 'update' flag presumably makes index building
        # tolerate the temporary -1 hash markers used during bulk updates --
        # confirm against _HtKeys.build_indices.
        newkeys.build_indices(update)
        self._keys = newkeys
+
    def _add_with_hash(self, entry: _Entry[_V]) -> None:
        """Append *entry* to the table, growing it first if it is full."""
        if self._keys.usable <= 0:
            # New log2 size: roughly 3x the live entry count, with the
            # OR against MINSIZE-1 enforcing the minimum table size.
            self._resize((self._used * 3 | _HtKeys.MINSIZE - 1).bit_length(), False)
        keys = self._keys
        slot = keys.find_empty_slot(entry.hash)
        # The slot records the entry's position in the entries list.
        keys.indices[slot] = len(keys.entries)
        keys.entries.append(entry)
        self._incr_version()
        self._used += 1
        keys.usable -= 1
+
    def _add_with_hash_for_upd(self, entry: _Entry[_V]) -> None:
        """Append *entry* during a bulk update/merge pass.

        Same as _add_with_hash() except the stored hash is set to the
        temporary -1 marker; _post_update() restores the real hash later.
        """
        if self._keys.usable <= 0:
            self._resize((self._used * 3 | _HtKeys.MINSIZE - 1).bit_length(), True)
        keys = self._keys
        slot = keys.find_empty_slot(entry.hash)
        keys.indices[slot] = len(keys.entries)
        entry.hash = -1
        keys.entries.append(entry)
        self._incr_version()
        self._used += 1
        keys.usable -= 1
+
+ def _del_at(self, slot: int, idx: int) -> None:
+ self._keys.entries[idx] = None
+ self._keys.indices[slot] = -2
+ self._used -= 1
+
    def _del_at_for_upd(self, entry: _Entry[_V]) -> None:
        # Logically delete during a bulk update: only clear key/value here;
        # _post_update() removes the entry and fixes the counters later.
        entry.key = None  # type: ignore[assignment]
        entry.value = None  # type: ignore[assignment]
+
+
class CIMultiDict(_CIMixin, MultiDict[_V]):
    """Dictionary with the support for duplicate case-insensitive keys."""
    # All behavior comes from combining _CIMixin (which presumably supplies
    # the case-insensitive key identity -- confirm) with MultiDict.
+
+
class MultiDictProxy(_CSMixin, MultiMapping[_V]):
    """Read-only proxy for MultiDict instance."""

    __slots__ = ("_md",)

    # The wrapped mutable multidict; every method below delegates to it.
    _md: MultiDict[_V]

    def __init__(self, arg: Union[MultiDict[_V], "MultiDictProxy[_V]"]):
        """Wrap *arg*; proxying a proxy shares the same underlying MultiDict."""
        if not isinstance(arg, (MultiDict, MultiDictProxy)):
            raise TypeError(
                f"ctor requires MultiDict or MultiDictProxy instance, not {type(arg)}"
            )
        if isinstance(arg, MultiDictProxy):
            self._md = arg._md
        else:
            self._md = arg

    def __reduce__(self) -> NoReturn:
        # Proxies are live views; pickling them is deliberately refused.
        raise TypeError(f"can't pickle {self.__class__.__name__} objects")

    @overload
    def getall(self, key: str) -> list[_V]: ...
    @overload
    def getall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
    def getall(
        self, key: str, default: Union[_T, _SENTINEL] = sentinel
    ) -> Union[list[_V], _T]:
        """Return a list of all values matching the key."""
        if default is not sentinel:
            return self._md.getall(key, default)
        else:
            return self._md.getall(key)

    @overload
    def getone(self, key: str) -> _V: ...
    @overload
    def getone(self, key: str, default: _T) -> Union[_V, _T]: ...
    def getone(
        self, key: str, default: Union[_T, _SENTINEL] = sentinel
    ) -> Union[_V, _T]:
        """Get first value matching the key.

        Raises KeyError if the key is not found and no default is provided.
        """
        if default is not sentinel:
            return self._md.getone(key, default)
        else:
            return self._md.getone(key)

    # Mapping interface #

    def __getitem__(self, key: str) -> _V:
        return self.getone(key)

    @overload
    def get(self, key: str, /) -> Union[_V, None]: ...
    @overload
    def get(self, key: str, /, default: _T) -> Union[_V, _T]: ...
    def get(self, key: str, default: Union[_T, None] = None) -> Union[_V, _T, None]:
        """Get first value matching the key.

        If the key is not found, returns the default (or None if no default is provided)
        """
        return self._md.getone(key, default)

    def __iter__(self) -> Iterator[str]:
        return iter(self._md.keys())

    def __len__(self) -> int:
        return len(self._md)

    def keys(self) -> KeysView[str]:
        """Return a new view of the dictionary's keys."""
        return self._md.keys()

    def items(self) -> ItemsView[str, _V]:
        """Return a new view of the dictionary's items (*(key, value)* pairs)."""
        return self._md.items()

    def values(self) -> _ValuesView[_V]:
        """Return a new view of the dictionary's values."""
        return self._md.values()

    def __eq__(self, other: object) -> bool:
        # Equality is defined by the wrapped multidict's contents.
        return self._md == other

    def __contains__(self, key: object) -> bool:
        return key in self._md

    @reprlib.recursive_repr()
    def __repr__(self) -> str:
        body = ", ".join(f"'{k}': {v!r}" for k, v in self.items())
        return f"<{self.__class__.__name__}({body})>"

    def copy(self) -> MultiDict[_V]:
        """Return a copy of itself."""
        # Note: returns a *mutable* MultiDict, not another proxy.
        return MultiDict(self._md)
+
+
class CIMultiDictProxy(_CIMixin, MultiDictProxy[_V]):
    """Read-only proxy for CIMultiDict instance."""

    def __init__(self, arg: Union[MultiDict[_V], MultiDictProxy[_V]]):
        """Wrap *arg*, which must be a CIMultiDict or CIMultiDictProxy."""
        if isinstance(arg, (CIMultiDict, CIMultiDictProxy)):
            super().__init__(arg)
        else:
            raise TypeError(
                "ctor requires CIMultiDict or CIMultiDictProxy instance"
                f", not {type(arg)}"
            )

    def copy(self) -> CIMultiDict[_V]:
        """Return a mutable CIMultiDict copy of itself."""
        return CIMultiDict(self._md)
+
+
def getversion(md: Union[MultiDict[object], MultiDictProxy[object]]) -> int:
    """Return the internal change counter of *md* (multidict or proxy)."""
    if isinstance(md, MultiDictProxy):
        return md._md._version
    if isinstance(md, MultiDict):
        return md._version
    raise TypeError("Parameter should be multidict or proxy")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/py.typed"
new file mode 100644
index 0000000..dfe8cc0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/multidict/py.typed"
@@ -0,0 +1 @@
+PEP-561 marker.
\ No newline at end of file
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/METADATA"
new file mode 100644
index 0000000..0dd40dc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/METADATA"
@@ -0,0 +1,443 @@
+Metadata-Version: 2.4
+Name: propcache
+Version: 0.4.1
+Summary: Accelerated property cache
+Home-page: https://github.com/aio-libs/propcache
+Author: Andrew Svetlov
+Author-email: andrew.svetlov@gmail.com
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache-2.0
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: GitHub Workflows, https://github.com/aio-libs/propcache/actions?query=branch:master
+Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/propcache
+Project-URL: Docs: Changelog, https://propcache.readthedocs.io/en/latest/changes/
+Project-URL: Docs: RTD, https://propcache.readthedocs.io
+Project-URL: GitHub: issues, https://github.com/aio-libs/propcache/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/propcache
+Keywords: cython,cext,propcache
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Cython
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: NOTICE
+Dynamic: license-file
+
+propcache
+=========
+
+The module provides a fast implementation of cached properties for Python 3.9+.
+
+.. image:: https://github.com/aio-libs/propcache/actions/workflows/ci-cd.yml/badge.svg
+ :target: https://github.com/aio-libs/propcache/actions?query=workflow%3ACI
+ :align: right
+
+.. image:: https://codecov.io/gh/aio-libs/propcache/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/propcache
+
+.. image:: https://badge.fury.io/py/propcache.svg
+ :target: https://badge.fury.io/py/propcache
+
+
+.. image:: https://readthedocs.org/projects/propcache/badge/?version=latest
+ :target: https://propcache.readthedocs.io
+
+
+.. image:: https://img.shields.io/pypi/pyversions/propcache.svg
+ :target: https://pypi.python.org/pypi/propcache
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+Introduction
+------------
+
+The API is designed to be nearly identical to the built-in ``functools.cached_property`` class,
+except for the additional ``under_cached_property`` class which uses ``self._cache``
+instead of ``self.__dict__`` to store the cached values and prevents ``__set__`` from being called.
+
+For full documentation please read https://propcache.readthedocs.io.
+
+Installation
+------------
+
+::
+
+ $ pip install propcache
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``propcache`` on another operating system where wheels are not provided,
+the tarball will be used to compile the library from
+the source code. It requires a C compiler and Python headers installed.
+
+To skip the compilation you must explicitly opt-in by using a PEP 517
+configuration setting ``pure-python``, or setting the ``PROPCACHE_NO_EXTENSIONS``
+environment variable to a non-empty value, e.g.:
+
+.. code-block:: console
+
+   $ pip install propcache --config-settings=pure-python=true
+
+Please note that the pure-Python (uncompiled) version is much slower. However,
+PyPy always uses a pure-Python implementation, and, as such, it is unaffected
+by this variable.
+
+
+API documentation
+------------------
+
+The documentation is located at https://propcache.readthedocs.io.
+
+Source code
+-----------
+
+The project is hosted on GitHub_
+
+Please file an issue on the `bug tracker
+<https://github.com/aio-libs/propcache/issues>`_ if you have found a bug
+or have some suggestion in order to improve the library.
+
+Discussion list
+---------------
+
+*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
+
+Feel free to post your questions and ideas here.
+
+
+Authors and License
+-------------------
+
+The ``propcache`` package is derived from ``yarl`` which is written by Andrew Svetlov.
+
+It's *Apache 2* licensed and freely available.
+
+
+.. _GitHub: https://github.com/aio-libs/propcache
+
+=========
+Changelog
+=========
+
+..
+ You should *NOT* be adding new change log entries to this file, this
+ file is managed by towncrier. You *may* edit previous change logs to
+ fix problems like typo corrections or such.
+ To add a new change log entry, please see
+ https://pip.pypa.io/en/latest/development/#adding-a-news-entry
+ we named the news folder "changes".
+
+ WARNING: Don't drop the next directive!
+
+.. towncrier release notes start
+
+0.4.1
+=====
+
+*(2025-10-08)*
+
+
+Bug fixes
+---------
+
+- Fixed reference leak caused by ``Py_INCREF`` because Cython has its own reference counter systems -- by `@Vizonex <https://github.com/sponsors/Vizonex>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#162 <https://github.com/aio-libs/propcache/issues/162>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- Fixes the default value for the ``os``
+ parameter in ``reusable-build-wheel.yml``
+ to be ``ubuntu-latest`` instead of
+ ``ubuntu``.
+
+ *Related issues and pull requests on GitHub:*
+ `#155 <https://github.com/aio-libs/propcache/issues/155>`__.
+
+
+----
+
+
+0.4.0
+=====
+
+*(2025-10-04)*
+
+
+Features
+--------
+
+- Optimized propcache by replacing sentinel ``object`` for checking if
+ the ``object`` is ``NULL`` and changed ``dict`` API for
+ Python C-API -- by `@Vizonex <https://github.com/sponsors/Vizonex>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#121 <https://github.com/aio-libs/propcache/issues/121>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- Builds have been added for arm64 Windows
+ wheels and the ``reusable-build-wheel.yml``
+ workflow has been modified to allow for
+ an OS value (``windows-11-arm``) which
+ does not include the ``-latest`` postfix
+ -- by `@finnagin <https://github.com/sponsors/finnagin>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#133 <https://github.com/aio-libs/propcache/issues/133>`__.
+
+- Added CI for CPython 3.14 -- by `@kumaraditya303 <https://github.com/sponsors/kumaraditya303>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#140 <https://github.com/aio-libs/propcache/issues/140>`__.
+
+
+----
+
+
+0.3.2
+=====
+
+*(2025-06-09)*
+
+
+Improved documentation
+----------------------
+
+- Fixed incorrect decorator usage in the ``~propcache.api.under_cached_property`` example code -- by `@meanmail <https://github.com/sponsors/meanmail>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#109 <https://github.com/aio-libs/propcache/issues/109>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Updated to use Cython 3.1 universally across the build path -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#117 <https://github.com/aio-libs/propcache/issues/117>`__.
+
+- Made Cython line tracing opt-in via the ``with-cython-tracing`` build config setting -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ Previously, line tracing was enabled by default in ``pyproject.toml``, which caused build issues for some users and made wheels nearly twice as slow.
+
+ Now line tracing is only enabled when explicitly requested via ``pip install . --config-setting=with-cython-tracing=true`` or by setting the ``PROPCACHE_CYTHON_TRACING`` environment variable.
+
+ *Related issues and pull requests on GitHub:*
+ `#118 <https://github.com/aio-libs/propcache/issues/118>`__.
+
+
+----
+
+
+0.3.1
+=====
+
+*(2025-03-25)*
+
+
+Bug fixes
+---------
+
+- Improved typing annotations, fixing some type errors under correct usage
+ and improving typing robustness generally -- by `@Dreamsorcerer <https://github.com/sponsors/Dreamsorcerer>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#103 <https://github.com/aio-libs/propcache/issues/103>`__.
+
+
+----
+
+
+0.3.0
+=====
+
+*(2025-02-20)*
+
+
+Features
+--------
+
+- Implemented support for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#84 <https://github.com/aio-libs/propcache/issues/84>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Started building wheels for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#84 <https://github.com/aio-libs/propcache/issues/84>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- GitHub Actions CI/CD is now configured to manage caching pip-ecosystem
+ dependencies using `re-actors/cache-python-deps`_ -- an action by
+ `@webknjaz <https://github.com/sponsors/webknjaz>`__ that takes into account ABI stability and the exact
+ version of Python runtime.
+
+ .. _`re-actors/cache-python-deps`:
+ https://github.com/marketplace/actions/cache-python-deps
+
+ *Related issues and pull requests on GitHub:*
+ `#93 <https://github.com/aio-libs/propcache/issues/93>`__.
+
+
+----
+
+
+0.2.1
+=====
+
+*(2024-12-01)*
+
+
+Bug fixes
+---------
+
+- Stopped implicitly allowing the use of Cython pre-release versions when
+ building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and
+ `@markgreene74 <https://github.com/sponsors/markgreene74>`__.
+
+ *Related commits on GitHub:*
+ `64df0a6 <https://github.com/aio-libs/propcache/commit/64df0a6>`__.
+
+- Fixed ``wrapped`` and ``func`` not being accessible in the Cython versions of ``propcache.api.cached_property`` and ``propcache.api.under_cached_property`` decorators -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#72 <https://github.com/aio-libs/propcache/issues/72>`__.
+
+
+Removals and backward incompatible breaking changes
+---------------------------------------------------
+
+- Removed support for Python 3.8 as it has reached end of life -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#57 <https://github.com/aio-libs/propcache/issues/57>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Stopped implicitly allowing the use of Cython pre-release versions when
+ building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and
+ `@markgreene74 <https://github.com/sponsors/markgreene74>`__.
+
+ *Related commits on GitHub:*
+ `64df0a6 <https://github.com/aio-libs/propcache/commit/64df0a6>`__.
+
+
+----
+
+
+0.2.0
+=====
+
+*(2024-10-07)*
+
+
+Bug fixes
+---------
+
+- Fixed loading the C-extensions on Python 3.8 -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#26 <https://github.com/aio-libs/propcache/issues/26>`__.
+
+
+Features
+--------
+
+- Improved typing for the ``propcache.api.under_cached_property`` decorator -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#38 <https://github.com/aio-libs/propcache/issues/38>`__.
+
+
+Improved documentation
+----------------------
+
+- Added API documentation for the ``propcache.api.cached_property`` and ``propcache.api.under_cached_property`` decorators -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#16 <https://github.com/aio-libs/propcache/issues/16>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Moved ``propcache.api.under_cached_property`` and ``propcache.api.cached_property`` to `propcache.api` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ Both decorators remain importable from the top-level package, however importing from `propcache.api` is now the recommended way to use them.
+
+ *Related issues and pull requests on GitHub:*
+ `#19 <https://github.com/aio-libs/propcache/issues/19>`__, `#24 <https://github.com/aio-libs/propcache/issues/24>`__, `#32 <https://github.com/aio-libs/propcache/issues/32>`__.
+
+- Converted project to use a src layout -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#22 <https://github.com/aio-libs/propcache/issues/22>`__, `#29 <https://github.com/aio-libs/propcache/issues/29>`__, `#37 <https://github.com/aio-libs/propcache/issues/37>`__.
+
+
+----
+
+
+0.1.0
+=====
+
+*(2024-10-03)*
+
+
+Features
+--------
+
+- Added ``armv7l`` wheels -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#5 <https://github.com/aio-libs/propcache/issues/5>`__.
+
+
+----
+
+
+0.0.0
+=====
+
+*(2024-10-02)*
+
+
+- Initial release.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/RECORD"
new file mode 100644
index 0000000..4e7f77f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/RECORD"
@@ -0,0 +1,18 @@
+propcache-0.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+propcache-0.4.1.dist-info/METADATA,sha256=uH_-mZacCpex3PvgM5lxCcQRoVNcys9To6caNbJAlz0,14188
+propcache-0.4.1.dist-info/RECORD,,
+propcache-0.4.1.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101
+propcache-0.4.1.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+propcache-0.4.1.dist-info/licenses/NOTICE,sha256=VtasbIEFwKUTBMIdsGDjYa-ajqCvmnXCOcKLXRNpODg,609
+propcache-0.4.1.dist-info/top_level.txt,sha256=pVF_GbqSAITPMiX27kfU3QP9-ufhRvkADmudDxWdF3w,10
+propcache/__init__.py,sha256=8kebeGvYn7s-ow1AFmK0A4EvonZMpyM7Lkzs2Ktia3Y,965
+propcache/__pycache__/__init__.cpython-312.pyc,,
+propcache/__pycache__/_helpers.cpython-312.pyc,,
+propcache/__pycache__/_helpers_py.cpython-312.pyc,,
+propcache/__pycache__/api.cpython-312.pyc,,
+propcache/_helpers.py,sha256=68SQm6kETN8Mnt9Ol26LJYgHgmB0mKy1tp92888zN4k,1553
+propcache/_helpers_c.cp312-win_amd64.pyd,sha256=9AqXBuIAWM2omRGPqnpo7lhDAqT9aTO6uH7QvFzn0so,65024
+propcache/_helpers_c.pyx,sha256=kcJa1U5lh54TPCqAeZ0cVB7URcb3I8ZbJieOrkNhLQE,3265
+propcache/_helpers_py.py,sha256=Wixs2zWA-FBU-j4zLPyBUU24FEfPhKk-UunFSp9q95U,1909
+propcache/api.py,sha256=wvgB-ypkkI5uf72VVYl2NFGc_TnzUQA2CxC7dTlL5ak,179
+propcache/py.typed,sha256=ay5OMO475PlcZ_Fbun9maHW7Y6MBTk0UXL4ztHx3Iug,14
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/WHEEL"
new file mode 100644
index 0000000..10ac2c2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/LICENSE"
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/NOTICE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/NOTICE"
new file mode 100644
index 0000000..fa53b2b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/licenses/NOTICE"
@@ -0,0 +1,13 @@
+ Copyright 2016-2021, Andrew Svetlov and aio-libs team
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/top_level.txt"
new file mode 100644
index 0000000..8c9accf
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache-0.4.1.dist-info/top_level.txt"
@@ -0,0 +1 @@
+propcache
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/__init__.py"
new file mode 100644
index 0000000..a6444d1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/__init__.py"
@@ -0,0 +1,32 @@
+"""propcache: An accelerated property cache for Python classes."""
+
+from typing import TYPE_CHECKING
+
+_PUBLIC_API = ("cached_property", "under_cached_property")
+
+__version__ = "0.4.1"
+__all__ = ()
+
+# Imports have moved to `propcache.api` in 0.2.0+.
+# This module is now a facade for the API.
+if TYPE_CHECKING:
+ from .api import cached_property as cached_property # noqa: F401
+ from .api import under_cached_property as under_cached_property # noqa: F401
+
+
+def _import_facade(attr: str) -> object:
+ """Import the public API from the `api` module."""
+ if attr in _PUBLIC_API:
+ from . import api # pylint: disable=import-outside-toplevel
+
+ return getattr(api, attr)
+ raise AttributeError(f"module '{__package__}' has no attribute '{attr}'")
+
+
+def _dir_facade() -> list[str]:
+ """Include the public API in the module's dir() output."""
+ return [*_PUBLIC_API, *globals().keys()]
+
+
+__getattr__ = _import_facade
+__dir__ = _dir_facade
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers.py"
new file mode 100644
index 0000000..1e52895
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers.py"
@@ -0,0 +1,39 @@
+import os
+import sys
+from typing import TYPE_CHECKING
+
+__all__ = ("cached_property", "under_cached_property")
+
+
+NO_EXTENSIONS = bool(os.environ.get("PROPCACHE_NO_EXTENSIONS")) # type: bool
+if sys.implementation.name != "cpython":
+ NO_EXTENSIONS = True
+
+
+# isort: off
+if TYPE_CHECKING:
+ from ._helpers_py import cached_property as cached_property_py
+ from ._helpers_py import under_cached_property as under_cached_property_py
+
+ cached_property = cached_property_py
+ under_cached_property = under_cached_property_py
+elif not NO_EXTENSIONS: # pragma: no branch
+ try:
+ from ._helpers_c import cached_property as cached_property_c # type: ignore[attr-defined, unused-ignore]
+ from ._helpers_c import under_cached_property as under_cached_property_c # type: ignore[attr-defined, unused-ignore]
+
+ cached_property = cached_property_c
+ under_cached_property = under_cached_property_c
+ except ImportError: # pragma: no cover
+ from ._helpers_py import cached_property as cached_property_py
+ from ._helpers_py import under_cached_property as under_cached_property_py
+
+ cached_property = cached_property_py # type: ignore[assignment, misc]
+ under_cached_property = under_cached_property_py
+else:
+ from ._helpers_py import cached_property as cached_property_py
+ from ._helpers_py import under_cached_property as under_cached_property_py
+
+ cached_property = cached_property_py # type: ignore[assignment, misc]
+ under_cached_property = under_cached_property_py
+# isort: on
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..de9b9e9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.pyx" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.pyx"
new file mode 100644
index 0000000..9e9e558
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_c.pyx"
@@ -0,0 +1,103 @@
+# cython: language_level=3, freethreading_compatible=True
+from types import GenericAlias
+
+from cpython.dict cimport PyDict_GetItem
+from cpython.object cimport PyObject
+
+
+cdef extern from "Python.h":
+ # Call a callable Python object callable with exactly
+ # 1 positional argument arg and no keyword arguments.
+ # Return the result of the call on success, or raise
+ # an exception and return NULL on failure.
+ PyObject* PyObject_CallOneArg(
+ object callable, object arg
+ ) except NULL
+ int PyDict_SetItem(
+ object dict, object key, PyObject* value
+ ) except -1
+ void Py_DECREF(PyObject*)
+
+
+cdef class under_cached_property:
+ """Use as a class method decorator. It operates almost exactly like
+ the Python `@property` decorator, but it puts the result of the
+ method it decorates into the instance dict after the first call,
+ effectively replacing the function it decorates with an instance
+ variable. It is, in Python parlance, a data descriptor.
+
+ """
+
+ cdef readonly object wrapped
+ cdef object name
+
+ def __init__(self, object wrapped):
+ self.wrapped = wrapped
+ self.name = wrapped.__name__
+
+ @property
+ def __doc__(self):
+ return self.wrapped.__doc__
+
+ def __get__(self, object inst, owner):
+ if inst is None:
+ return self
+ cdef dict cache = inst._cache
+ cdef PyObject* val = PyDict_GetItem(cache, self.name)
+ if val == NULL:
+ val = PyObject_CallOneArg(self.wrapped, inst)
+ PyDict_SetItem(cache, self.name, val)
+ Py_DECREF(val)
+ return <object>val
+
+ def __set__(self, inst, value):
+ raise AttributeError("cached property is read-only")
+
+ __class_getitem__ = classmethod(GenericAlias)
+
+
+cdef class cached_property:
+ """Use as a class method decorator. It operates almost exactly like
+ the Python `@property` decorator, but it puts the result of the
+ method it decorates into the instance dict after the first call,
+ effectively replacing the function it decorates with an instance
+ variable. It is, in Python parlance, a data descriptor.
+
+ """
+
+ cdef readonly object func
+ cdef object name
+
+ def __init__(self, func):
+ self.func = func
+ self.name = None
+
+ @property
+ def __doc__(self):
+ return self.func.__doc__
+
+ def __set_name__(self, owner, object name):
+ if self.name is None:
+ self.name = name
+ elif name != self.name:
+ raise TypeError(
+ "Cannot assign the same cached_property to two different names "
+ f"({self.name!r} and {name!r})."
+ )
+
+ def __get__(self, inst, owner):
+ if inst is None:
+ return self
+ if self.name is None:
+ raise TypeError(
+ "Cannot use cached_property instance"
+ " without calling __set_name__ on it.")
+ cdef dict cache = inst.__dict__
+ cdef PyObject* val = PyDict_GetItem(cache, self.name)
+ if val is NULL:
+ val = PyObject_CallOneArg(self.func, inst)
+ PyDict_SetItem(cache, self.name, val)
+ Py_DECREF(val)
+ return <object>val
+
+ __class_getitem__ = classmethod(GenericAlias)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_py.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_py.py"
new file mode 100644
index 0000000..1374fc6
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/_helpers_py.py"
@@ -0,0 +1,62 @@
+"""Various helper functions."""
+
+import sys
+from collections.abc import Mapping
+from functools import cached_property
+from typing import Any, Callable, Generic, Optional, Protocol, TypeVar, Union, overload
+
+__all__ = ("under_cached_property", "cached_property")
+
+
+if sys.version_info >= (3, 11):
+ from typing import Self
+else:
+ Self = Any
+
+_T = TypeVar("_T")
+# We use Mapping to make it possible to use TypedDict, but this isn't
+# technically type safe as we need to assign into the dict.
+_Cache = TypeVar("_Cache", bound=Mapping[str, Any])
+
+
+class _CacheImpl(Protocol[_Cache]):
+ _cache: _Cache
+
+
+class under_cached_property(Generic[_T]):
+ """Use as a class method decorator.
+
+ It operates almost exactly like
+ the Python `@property` decorator, but it puts the result of the
+ method it decorates into the instance dict after the first call,
+ effectively replacing the function it decorates with an instance
+ variable. It is, in Python parlance, a data descriptor.
+ """
+
+ def __init__(self, wrapped: Callable[[Any], _T]) -> None:
+ self.wrapped = wrapped
+ self.__doc__ = wrapped.__doc__
+ self.name = wrapped.__name__
+
+ @overload
+ def __get__(self, inst: None, owner: Optional[type[object]] = None) -> Self: ...
+
+ @overload
+ def __get__(
+ self, inst: _CacheImpl[Any], owner: Optional[type[object]] = None
+ ) -> _T: ...
+
+ def __get__(
+ self, inst: Optional[_CacheImpl[Any]], owner: Optional[type[object]] = None
+ ) -> Union[_T, Self]:
+ if inst is None:
+ return self
+ try:
+ return inst._cache[self.name] # type: ignore[no-any-return]
+ except KeyError:
+ val = self.wrapped(inst)
+ inst._cache[self.name] = val
+ return val
+
+ def __set__(self, inst: _CacheImpl[Any], value: _T) -> None:
+ raise AttributeError("cached property is read-only")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/api.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/api.py"
new file mode 100644
index 0000000..22389e6
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/api.py"
@@ -0,0 +1,8 @@
+"""Public API of the property caching library."""
+
+from ._helpers import cached_property, under_cached_property
+
+__all__ = (
+ "cached_property",
+ "under_cached_property",
+)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/py.typed"
new file mode 100644
index 0000000..dcf2c80
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/propcache/py.typed"
@@ -0,0 +1 @@
+# Placeholder
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/METADATA"
new file mode 100644
index 0000000..ef67c74
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/METADATA"
@@ -0,0 +1,131 @@
+Metadata-Version: 2.4
+Name: PyMySQL
+Version: 1.1.2
+Summary: Pure Python MySQL Driver
+Author-email: Inada Naoki <songofacandy@gmail.com>, Yutaka Matsubara <yutaka.matsubara@gmail.com>
+License-Expression: MIT
+Project-URL: Project, https://github.com/PyMySQL/PyMySQL
+Project-URL: Documentation, https://pymysql.readthedocs.io/
+Project-URL: Changelog, https://github.com/PyMySQL/PyMySQL/blob/main/CHANGELOG.md
+Keywords: MySQL
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Database
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: rsa
+Requires-Dist: cryptography; extra == "rsa"
+Provides-Extra: ed25519
+Requires-Dist: PyNaCl>=1.4.0; extra == "ed25519"
+Dynamic: license-file
+
+[](https://pymysql.readthedocs.io/)
+[](https://codecov.io/gh/PyMySQL/PyMySQL)
+[](https://deepwiki.com/PyMySQL/PyMySQL)
+
+# PyMySQL
+
+This package contains a pure-Python MySQL and MariaDB client library, based on
+[PEP 249](https://www.python.org/dev/peps/pep-0249/).
+
+## Requirements
+
+- Python -- one of the following:
+ - [CPython](https://www.python.org/) : 3.9 and newer
+ - [PyPy](https://pypy.org/) : Latest 3.x version
+- MySQL Server -- one of the following:
+ - [MySQL](https://www.mysql.com/) LTS versions
+ - [MariaDB](https://mariadb.org/) LTS versions
+
+## Installation
+
+Package is uploaded on [PyPI](https://pypi.org/project/PyMySQL).
+
+You can install it with pip:
+
+ $ python3 -m pip install PyMySQL
+
+To use "sha256_password" or "caching_sha2_password" for authenticate,
+you need to install additional dependency:
+
+ $ python3 -m pip install PyMySQL[rsa]
+
+To use MariaDB's "ed25519" authentication method, you need to install
+additional dependency:
+
+ $ python3 -m pip install PyMySQL[ed25519]
+
+## Documentation
+
+Documentation is available online: <https://pymysql.readthedocs.io/>
+
+For support, please refer to the
+[StackOverflow](https://stackoverflow.com/questions/tagged/pymysql).
+
+## Example
+
+The following examples make use of a simple table
+
+``` sql
+CREATE TABLE `users` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `email` varchar(255) COLLATE utf8_bin NOT NULL,
+ `password` varchar(255) COLLATE utf8_bin NOT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin
+AUTO_INCREMENT=1 ;
+```
+
+``` python
+import pymysql.cursors
+
+# Connect to the database
+connection = pymysql.connect(host='localhost',
+ user='user',
+ password='passwd',
+ database='db',
+ cursorclass=pymysql.cursors.DictCursor)
+
+with connection:
+ with connection.cursor() as cursor:
+ # Create a new record
+ sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
+ cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
+
+ # connection is not autocommit by default. So you must commit to save
+ # your changes.
+ connection.commit()
+
+ with connection.cursor() as cursor:
+ # Read a single record
+ sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
+ cursor.execute(sql, ('webmaster@python.org',))
+ result = cursor.fetchone()
+ print(result)
+```
+
+This example will print:
+
+``` python
+{'password': 'very-secret', 'id': 1}
+```
+
+## Resources
+
+- DB-API 2.0: <https://www.python.org/dev/peps/pep-0249/>
+- MySQL Reference Manuals: <https://dev.mysql.com/doc/>
+- Getting Help With MariaDB <https://mariadb.com/kb/en/getting-help-with-mariadb/>
+- MySQL client/server protocol:
+ <https://dev.mysql.com/doc/internals/en/client-server-protocol.html>
+- "Connector" channel in MySQL Community Slack:
+ <https://lefred.be/mysql-community-on-slack/>
+- PyMySQL mailing list:
+ <https://groups.google.com/forum/#!forum/pymysql-users>
+
+## License
+
+PyMySQL is released under the MIT License. See LICENSE for more
+information.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/RECORD"
new file mode 100644
index 0000000..f67045b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/RECORD"
@@ -0,0 +1,43 @@
+pymysql-1.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pymysql-1.1.2.dist-info/METADATA,sha256=tybjtEhDSfbAzYS5Ag7-X7mAglkvBrOcEJMW1o7_Dqg,4298
+pymysql-1.1.2.dist-info/RECORD,,
+pymysql-1.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pymysql-1.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pymysql-1.1.2.dist-info/licenses/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
+pymysql-1.1.2.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
+pymysql/__init__.py,sha256=0sUOasTjx9try2-4ZmPaogG0PVar7v37HBkq1ef1xLU,4262
+pymysql/__pycache__/__init__.cpython-312.pyc,,
+pymysql/__pycache__/_auth.cpython-312.pyc,,
+pymysql/__pycache__/charset.cpython-312.pyc,,
+pymysql/__pycache__/connections.cpython-312.pyc,,
+pymysql/__pycache__/converters.cpython-312.pyc,,
+pymysql/__pycache__/cursors.cpython-312.pyc,,
+pymysql/__pycache__/err.cpython-312.pyc,,
+pymysql/__pycache__/optionfile.cpython-312.pyc,,
+pymysql/__pycache__/protocol.cpython-312.pyc,,
+pymysql/__pycache__/times.cpython-312.pyc,,
+pymysql/_auth.py,sha256=7bIFnJ7lJrFEhKLEnHGo1-h7E5cnZB2211KE1vatBAQ,7638
+pymysql/charset.py,sha256=Y4GgMDxn0Yz-99NwstfCLeCfoRFdwywWoHrn5Gnvghk,10258
+pymysql/connections.py,sha256=Bs8PG2UacyQF4hrJ7N68mdpifm7t5At0vTHvZFHZG8k,53908
+pymysql/constants/CLIENT.py,sha256=SSvMFPZCTVMU1UWa4zOrfhYMDdR2wG2mS0E5GzJhDsg,878
+pymysql/constants/COMMAND.py,sha256=TGITAUcNWlq2Gwg2wv5UK2ykdTd4LYTk_EcJJOCpGIc,679
+pymysql/constants/CR.py,sha256=Qk35FWRMxRHd6Sa9CCIATMh7jegR3xnLdrdaBCT0dTQ,2320
+pymysql/constants/ER.py,sha256=nwqX_r0o4mmN4Cxm7NVRyJOTVov_5Gbl5peGe6oz5fk,12357
+pymysql/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370
+pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
+pymysql/constants/SERVER_STATUS.py,sha256=m28Iq5JGCFCWLhafE73-iOvw_9gDGqnytW3NkHpbugA,333
+pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pymysql/constants/__pycache__/CLIENT.cpython-312.pyc,,
+pymysql/constants/__pycache__/COMMAND.cpython-312.pyc,,
+pymysql/constants/__pycache__/CR.cpython-312.pyc,,
+pymysql/constants/__pycache__/ER.cpython-312.pyc,,
+pymysql/constants/__pycache__/FIELD_TYPE.cpython-312.pyc,,
+pymysql/constants/__pycache__/FLAG.cpython-312.pyc,,
+pymysql/constants/__pycache__/SERVER_STATUS.cpython-312.pyc,,
+pymysql/constants/__pycache__/__init__.cpython-312.pyc,,
+pymysql/converters.py,sha256=8Jl-1K1Nt-ZKAiahBJV4MoSvO1O-PZtu8CfQG9EDftk,9523
+pymysql/cursors.py,sha256=a4-JHYP148kx-9qVNRz8vTtlilGlKDbk_QtFlWph5L4,16535
+pymysql/err.py,sha256=wLe0af6AmK6z7fq_MnYfgYsc6LnUuMj7EliHPZKquBA,4178
+pymysql/optionfile.py,sha256=eQoz6c43yvmHtp5MI9TB2GPRdoggOLemcUWABksfutk,651
+pymysql/protocol.py,sha256=aD-PGPRYcwkSI6ZJoJWZVRKn9H_A0f70KfPDu65tq0o,11812
+pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/REQUESTED" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/REQUESTED"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/REQUESTED"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/WHEEL"
new file mode 100644
index 0000000..e7fa31b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..86b18e1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/licenses/LICENSE"
@@ -0,0 +1,19 @@
+Copyright (c) 2010, 2013 PyMySQL contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/top_level.txt"
new file mode 100644
index 0000000..d4a7eda
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql-1.1.2.dist-info/top_level.txt"
@@ -0,0 +1 @@
+pymysql
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/__init__.py"
new file mode 100644
index 0000000..0ec7ae6
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/__init__.py"
@@ -0,0 +1,183 @@
+"""
+PyMySQL: A pure-Python MySQL client library.
+
+Copyright (c) 2010-2016 PyMySQL contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+
+import sys
+
+from .constants import FIELD_TYPE
+from .err import (
+ Warning,
+ Error,
+ InterfaceError,
+ DataError,
+ DatabaseError,
+ OperationalError,
+ IntegrityError,
+ InternalError,
+ NotSupportedError,
+ ProgrammingError,
+ MySQLError,
+)
+from .times import (
+ Date,
+ Time,
+ Timestamp,
+ DateFromTicks,
+ TimeFromTicks,
+ TimestampFromTicks,
+)
+
+# PyMySQL version.
+# Used by setuptools and connection_attrs
+VERSION = (1, 1, 2, "final")
+VERSION_STRING = "1.1.2"
+
+### for mysqlclient compatibility
+### Django checks mysqlclient version.
+version_info = (1, 4, 6, "final", 1)
+__version__ = "1.4.6"
+
+
+def get_client_info(): # for MySQLdb compatibility
+ return __version__
+
+
+def install_as_MySQLdb():
+ """
+ After this function is called, any application that imports MySQLdb
+ will unwittingly actually use pymysql.
+ """
+ sys.modules["MySQLdb"] = sys.modules["pymysql"]
+
+
+# end of mysqlclient compatibility code
+
+threadsafety = 1
+apilevel = "2.0"
+paramstyle = "pyformat"
+
+from . import connections # noqa: E402
+
+
+class DBAPISet(frozenset):
+ def __ne__(self, other):
+ if isinstance(other, set):
+ return frozenset.__ne__(self, other)
+ else:
+ return other not in self
+
+ def __eq__(self, other):
+ if isinstance(other, frozenset):
+ return frozenset.__eq__(self, other)
+ else:
+ return other in self
+
+ def __hash__(self):
+ return frozenset.__hash__(self)
+
+
+STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING])
+BINARY = DBAPISet(
+ [
+ FIELD_TYPE.BLOB,
+ FIELD_TYPE.LONG_BLOB,
+ FIELD_TYPE.MEDIUM_BLOB,
+ FIELD_TYPE.TINY_BLOB,
+ ]
+)
+NUMBER = DBAPISet(
+ [
+ FIELD_TYPE.DECIMAL,
+ FIELD_TYPE.DOUBLE,
+ FIELD_TYPE.FLOAT,
+ FIELD_TYPE.INT24,
+ FIELD_TYPE.LONG,
+ FIELD_TYPE.LONGLONG,
+ FIELD_TYPE.TINY,
+ FIELD_TYPE.YEAR,
+ ]
+)
+DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE])
+TIME = DBAPISet([FIELD_TYPE.TIME])
+TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
+DATETIME = TIMESTAMP
+ROWID = DBAPISet()
+
+
+def Binary(x):
+ """Return x as a binary type."""
+ return bytes(x)
+
+
+def thread_safe():
+ return True # match MySQLdb.thread_safe()
+
+
+Connect = connect = Connection = connections.Connection
+NULL = "NULL"
+
+
+__all__ = [
+ "BINARY",
+ "Binary",
+ "Connect",
+ "Connection",
+ "DATE",
+ "Date",
+ "Time",
+ "Timestamp",
+ "DateFromTicks",
+ "TimeFromTicks",
+ "TimestampFromTicks",
+ "DataError",
+ "DatabaseError",
+ "Error",
+ "FIELD_TYPE",
+ "IntegrityError",
+ "InterfaceError",
+ "InternalError",
+ "MySQLError",
+ "NULL",
+ "NUMBER",
+ "NotSupportedError",
+ "DBAPISet",
+ "OperationalError",
+ "ProgrammingError",
+ "ROWID",
+ "STRING",
+ "TIME",
+ "TIMESTAMP",
+ "Warning",
+ "apilevel",
+ "connect",
+ "connections",
+ "constants",
+ "converters",
+ "cursors",
+ "get_client_info",
+ "paramstyle",
+ "threadsafety",
+ "version_info",
+ "install_as_MySQLdb",
+ "__version__",
+]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/_auth.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/_auth.py"
new file mode 100644
index 0000000..4790449
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/_auth.py"
@@ -0,0 +1,272 @@
+"""
+Implements auth methods
+"""
+
+from .err import OperationalError
+
+
+try:
+ from cryptography.hazmat.backends import default_backend
+ from cryptography.hazmat.primitives import serialization, hashes
+ from cryptography.hazmat.primitives.asymmetric import padding
+
+ _have_cryptography = True
+except ImportError:
+ _have_cryptography = False
+
+from functools import partial
+import hashlib
+
+
+DEBUG = False
+SCRAMBLE_LENGTH = 20
+sha1_new = partial(hashlib.new, "sha1")
+
+
+# mysql_native_password
+# https://dev.mysql.com/doc/internals/en/secure-password-authentication.html#packet-Authentication::Native41
+
+
+def scramble_native_password(password, message):
+ """Scramble used for mysql_native_password"""
+ if not password:
+ return b""
+
+ stage1 = sha1_new(password).digest()
+ stage2 = sha1_new(stage1).digest()
+ s = sha1_new()
+ s.update(message[:SCRAMBLE_LENGTH])
+ s.update(stage2)
+ result = s.digest()
+ return _my_crypt(result, stage1)
+
+
+def _my_crypt(message1, message2):
+ result = bytearray(message1)
+
+ for i in range(len(result)):
+ result[i] ^= message2[i]
+
+ return bytes(result)
+
+
+# MariaDB's client_ed25519-plugin
+# https://mariadb.com/kb/en/library/connection/#client_ed25519-plugin
+
+_nacl_bindings = False
+
+
+def _init_nacl():
+ global _nacl_bindings
+ try:
+ from nacl import bindings
+
+ _nacl_bindings = bindings
+ except ImportError:
+ raise RuntimeError(
+ "'pynacl' package is required for ed25519_password auth method"
+ )
+
+
+def _scalar_clamp(s32):
+ ba = bytearray(s32)
+ ba0 = bytes(bytearray([ba[0] & 248]))
+ ba31 = bytes(bytearray([(ba[31] & 127) | 64]))
+ return ba0 + bytes(s32[1:31]) + ba31
+
+
+def ed25519_password(password, scramble):
+ """Sign a random scramble with elliptic curve Ed25519.
+
+ Secret and public key are derived from password.
+ """
+ # variable names based on rfc8032 section-5.1.6
+ #
+ if not _nacl_bindings:
+ _init_nacl()
+
+ # h = SHA512(password)
+ h = hashlib.sha512(password).digest()
+
+ # s = prune(first_half(h))
+ s = _scalar_clamp(h[:32])
+
+ # r = SHA512(second_half(h) || M)
+ r = hashlib.sha512(h[32:] + scramble).digest()
+
+ # R = encoded point [r]B
+ r = _nacl_bindings.crypto_core_ed25519_scalar_reduce(r)
+ R = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(r)
+
+ # A = encoded point [s]B
+ A = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(s)
+
+ # k = SHA512(R || A || M)
+ k = hashlib.sha512(R + A + scramble).digest()
+
+ # S = (k * s + r) mod L
+ k = _nacl_bindings.crypto_core_ed25519_scalar_reduce(k)
+ ks = _nacl_bindings.crypto_core_ed25519_scalar_mul(k, s)
+ S = _nacl_bindings.crypto_core_ed25519_scalar_add(ks, r)
+
+ # signature = R || S
+ return R + S
+
+
+# sha256_password
+
+
+def _roundtrip(conn, send_data):
+ conn.write_packet(send_data)
+ pkt = conn._read_packet()
+ pkt.check_error()
+ return pkt
+
+
+def _xor_password(password, salt):
+ # Trailing NUL character will be added in Auth Switch Request.
+ # See https://github.com/mysql/mysql-server/blob/7d10c82196c8e45554f27c00681474a9fb86d137/sql/auth/sha2_password.cc#L939-L945
+ salt = salt[:SCRAMBLE_LENGTH]
+ password_bytes = bytearray(password)
+ # salt = bytearray(salt) # for PY2 compat.
+ salt_len = len(salt)
+ for i in range(len(password_bytes)):
+ password_bytes[i] ^= salt[i % salt_len]
+ return bytes(password_bytes)
+
+
+def sha2_rsa_encrypt(password, salt, public_key):
+ """Encrypt password with salt and public_key.
+
+ Used for sha256_password and caching_sha2_password.
+ """
+ if not _have_cryptography:
+ raise RuntimeError(
+ "'cryptography' package is required for sha256_password or"
+ + " caching_sha2_password auth methods"
+ )
+ message = _xor_password(password + b"\0", salt)
+ rsa_key = serialization.load_pem_public_key(public_key, default_backend())
+ return rsa_key.encrypt(
+ message,
+ padding.OAEP(
+ mgf=padding.MGF1(algorithm=hashes.SHA1()),
+ algorithm=hashes.SHA1(),
+ label=None,
+ ),
+ )
+
+
+def sha256_password_auth(conn, pkt):
+ if conn._secure:
+ if DEBUG:
+ print("sha256: Sending plain password")
+ data = conn.password + b"\0"
+ return _roundtrip(conn, data)
+
+ if pkt.is_auth_switch_request():
+ conn.salt = pkt.read_all()
+ if conn.salt.endswith(b"\0"):
+ conn.salt = conn.salt[:-1]
+ if not conn.server_public_key and conn.password:
+ # Request server public key
+ if DEBUG:
+ print("sha256: Requesting server public key")
+ pkt = _roundtrip(conn, b"\1")
+
+ if pkt.is_extra_auth_data():
+ conn.server_public_key = pkt._data[1:]
+ if DEBUG:
+ print("Received public key:\n", conn.server_public_key.decode("ascii"))
+
+ if conn.password:
+ if not conn.server_public_key:
+ raise OperationalError("Couldn't receive server's public key")
+
+ data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
+ else:
+ data = b""
+
+ return _roundtrip(conn, data)
+
+
+def scramble_caching_sha2(password, nonce):
+ # (bytes, bytes) -> bytes
+ """Scramble algorithm used in cached_sha2_password fast path.
+
+ XOR(SHA256(password), SHA256(SHA256(SHA256(password)), nonce))
+ """
+ if not password:
+ return b""
+
+ p1 = hashlib.sha256(password).digest()
+ p2 = hashlib.sha256(p1).digest()
+ p3 = hashlib.sha256(p2 + nonce).digest()
+
+ res = bytearray(p1)
+ for i in range(len(p3)):
+ res[i] ^= p3[i]
+
+ return bytes(res)
+
+
+def caching_sha2_password_auth(conn, pkt):
+ # No password fast path
+ if not conn.password:
+ return _roundtrip(conn, b"")
+
+ if pkt.is_auth_switch_request():
+ # Try from fast auth
+ conn.salt = pkt.read_all()
+ if conn.salt.endswith(b"\0"): # str.removesuffix is available in 3.9
+ conn.salt = conn.salt[:-1]
+ if DEBUG:
+ print(f"caching sha2: Trying fast path. salt={conn.salt.hex()!r}")
+ scrambled = scramble_caching_sha2(conn.password, conn.salt)
+ pkt = _roundtrip(conn, scrambled)
+ # else: fast auth is tried in initial handshake
+
+ if not pkt.is_extra_auth_data():
+ raise OperationalError(
+ "caching sha2: Unknown packet for fast auth: %s" % pkt._data[:1]
+ )
+
+ # magic numbers:
+ # 2 - request public key
+ # 3 - fast auth succeeded
+ # 4 - need full auth
+
+ pkt.advance(1)
+ n = pkt.read_uint8()
+
+ if n == 3:
+ if DEBUG:
+ print("caching sha2: succeeded by fast path.")
+ pkt = conn._read_packet()
+ pkt.check_error() # pkt must be OK packet
+ return pkt
+
+ if n != 4:
+ raise OperationalError("caching sha2: Unknown result for fast auth: %s" % n)
+
+ if DEBUG:
+ print("caching sha2: Trying full auth...")
+
+ if conn._secure:
+ if DEBUG:
+ print("caching sha2: Sending plain password via secure connection")
+ return _roundtrip(conn, conn.password + b"\0")
+
+ if not conn.server_public_key:
+ pkt = _roundtrip(conn, b"\x02") # Request public key
+ if not pkt.is_extra_auth_data():
+ raise OperationalError(
+ "caching sha2: Unknown packet for public key: %s" % pkt._data[:1]
+ )
+
+ conn.server_public_key = pkt._data[1:]
+ if DEBUG:
+ print(conn.server_public_key.decode("ascii"))
+
+ data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
+ pkt = _roundtrip(conn, data)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/charset.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/charset.py"
new file mode 100644
index 0000000..ec8e14e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/charset.py"
@@ -0,0 +1,217 @@
+# Internal use only. Do not use directly.
+
+MBLENGTH = {8: 1, 33: 3, 88: 2, 91: 2}
+
+
+class Charset:
+ def __init__(self, id, name, collation, is_default=False):
+ self.id, self.name, self.collation = id, name, collation
+ self.is_default = is_default
+
+ def __repr__(self):
+ return (
+ f"Charset(id={self.id}, name={self.name!r}, collation={self.collation!r})"
+ )
+
+ @property
+ def encoding(self):
+ name = self.name
+ if name in ("utf8mb4", "utf8mb3"):
+ return "utf8"
+ if name == "latin1":
+ return "cp1252"
+ if name == "koi8r":
+ return "koi8_r"
+ if name == "koi8u":
+ return "koi8_u"
+ return name
+
+ @property
+ def is_binary(self):
+ return self.id == 63
+
+
+class Charsets:
+ def __init__(self):
+ self._by_id = {}
+ self._by_name = {}
+
+ def add(self, c):
+ self._by_id[c.id] = c
+ if c.is_default:
+ self._by_name[c.name] = c
+
+ def by_id(self, id):
+ return self._by_id[id]
+
+ def by_name(self, name):
+ name = name.lower()
+ if name == "utf8":
+ name = "utf8mb4"
+ return self._by_name.get(name)
+
+
+_charsets = Charsets()
+charset_by_name = _charsets.by_name
+charset_by_id = _charsets.by_id
+
+"""
+TODO: update this script.
+
+Generated with:
+
+mysql -N -s -e "select id, character_set_name, collation_name, is_default
+from information_schema.collations order by id;" | python -c "import sys
+for l in sys.stdin.readlines():
+ id, name, collation, is_default = l.split(chr(9))
+ if is_default.strip() == "Yes":
+ print('_charsets.add(Charset(%s, \'%s\', \'%s\', True))' \
+ % (id, name, collation))
+ else:
+ print('_charsets.add(Charset(%s, \'%s\', \'%s\'))' \
+ % (id, name, collation, bool(is_default.strip()))
+"""
+
+_charsets.add(Charset(1, "big5", "big5_chinese_ci", True))
+_charsets.add(Charset(2, "latin2", "latin2_czech_cs"))
+_charsets.add(Charset(3, "dec8", "dec8_swedish_ci", True))
+_charsets.add(Charset(4, "cp850", "cp850_general_ci", True))
+_charsets.add(Charset(5, "latin1", "latin1_german1_ci"))
+_charsets.add(Charset(6, "hp8", "hp8_english_ci", True))
+_charsets.add(Charset(7, "koi8r", "koi8r_general_ci", True))
+_charsets.add(Charset(8, "latin1", "latin1_swedish_ci", True))
+_charsets.add(Charset(9, "latin2", "latin2_general_ci", True))
+_charsets.add(Charset(10, "swe7", "swe7_swedish_ci", True))
+_charsets.add(Charset(11, "ascii", "ascii_general_ci", True))
+_charsets.add(Charset(12, "ujis", "ujis_japanese_ci", True))
+_charsets.add(Charset(13, "sjis", "sjis_japanese_ci", True))
+_charsets.add(Charset(14, "cp1251", "cp1251_bulgarian_ci"))
+_charsets.add(Charset(15, "latin1", "latin1_danish_ci"))
+_charsets.add(Charset(16, "hebrew", "hebrew_general_ci", True))
+_charsets.add(Charset(18, "tis620", "tis620_thai_ci", True))
+_charsets.add(Charset(19, "euckr", "euckr_korean_ci", True))
+_charsets.add(Charset(20, "latin7", "latin7_estonian_cs"))
+_charsets.add(Charset(21, "latin2", "latin2_hungarian_ci"))
+_charsets.add(Charset(22, "koi8u", "koi8u_general_ci", True))
+_charsets.add(Charset(23, "cp1251", "cp1251_ukrainian_ci"))
+_charsets.add(Charset(24, "gb2312", "gb2312_chinese_ci", True))
+_charsets.add(Charset(25, "greek", "greek_general_ci", True))
+_charsets.add(Charset(26, "cp1250", "cp1250_general_ci", True))
+_charsets.add(Charset(27, "latin2", "latin2_croatian_ci"))
+_charsets.add(Charset(28, "gbk", "gbk_chinese_ci", True))
+_charsets.add(Charset(29, "cp1257", "cp1257_lithuanian_ci"))
+_charsets.add(Charset(30, "latin5", "latin5_turkish_ci", True))
+_charsets.add(Charset(31, "latin1", "latin1_german2_ci"))
+_charsets.add(Charset(32, "armscii8", "armscii8_general_ci", True))
+_charsets.add(Charset(33, "utf8mb3", "utf8mb3_general_ci", True))
+_charsets.add(Charset(34, "cp1250", "cp1250_czech_cs"))
+_charsets.add(Charset(36, "cp866", "cp866_general_ci", True))
+_charsets.add(Charset(37, "keybcs2", "keybcs2_general_ci", True))
+_charsets.add(Charset(38, "macce", "macce_general_ci", True))
+_charsets.add(Charset(39, "macroman", "macroman_general_ci", True))
+_charsets.add(Charset(40, "cp852", "cp852_general_ci", True))
+_charsets.add(Charset(41, "latin7", "latin7_general_ci", True))
+_charsets.add(Charset(42, "latin7", "latin7_general_cs"))
+_charsets.add(Charset(43, "macce", "macce_bin"))
+_charsets.add(Charset(44, "cp1250", "cp1250_croatian_ci"))
+_charsets.add(Charset(45, "utf8mb4", "utf8mb4_general_ci", True))
+_charsets.add(Charset(46, "utf8mb4", "utf8mb4_bin"))
+_charsets.add(Charset(47, "latin1", "latin1_bin"))
+_charsets.add(Charset(48, "latin1", "latin1_general_ci"))
+_charsets.add(Charset(49, "latin1", "latin1_general_cs"))
+_charsets.add(Charset(50, "cp1251", "cp1251_bin"))
+_charsets.add(Charset(51, "cp1251", "cp1251_general_ci", True))
+_charsets.add(Charset(52, "cp1251", "cp1251_general_cs"))
+_charsets.add(Charset(53, "macroman", "macroman_bin"))
+_charsets.add(Charset(57, "cp1256", "cp1256_general_ci", True))
+_charsets.add(Charset(58, "cp1257", "cp1257_bin"))
+_charsets.add(Charset(59, "cp1257", "cp1257_general_ci", True))
+_charsets.add(Charset(63, "binary", "binary", True))
+_charsets.add(Charset(64, "armscii8", "armscii8_bin"))
+_charsets.add(Charset(65, "ascii", "ascii_bin"))
+_charsets.add(Charset(66, "cp1250", "cp1250_bin"))
+_charsets.add(Charset(67, "cp1256", "cp1256_bin"))
+_charsets.add(Charset(68, "cp866", "cp866_bin"))
+_charsets.add(Charset(69, "dec8", "dec8_bin"))
+_charsets.add(Charset(70, "greek", "greek_bin"))
+_charsets.add(Charset(71, "hebrew", "hebrew_bin"))
+_charsets.add(Charset(72, "hp8", "hp8_bin"))
+_charsets.add(Charset(73, "keybcs2", "keybcs2_bin"))
+_charsets.add(Charset(74, "koi8r", "koi8r_bin"))
+_charsets.add(Charset(75, "koi8u", "koi8u_bin"))
+_charsets.add(Charset(76, "utf8mb3", "utf8mb3_tolower_ci"))
+_charsets.add(Charset(77, "latin2", "latin2_bin"))
+_charsets.add(Charset(78, "latin5", "latin5_bin"))
+_charsets.add(Charset(79, "latin7", "latin7_bin"))
+_charsets.add(Charset(80, "cp850", "cp850_bin"))
+_charsets.add(Charset(81, "cp852", "cp852_bin"))
+_charsets.add(Charset(82, "swe7", "swe7_bin"))
+_charsets.add(Charset(83, "utf8mb3", "utf8mb3_bin"))
+_charsets.add(Charset(84, "big5", "big5_bin"))
+_charsets.add(Charset(85, "euckr", "euckr_bin"))
+_charsets.add(Charset(86, "gb2312", "gb2312_bin"))
+_charsets.add(Charset(87, "gbk", "gbk_bin"))
+_charsets.add(Charset(88, "sjis", "sjis_bin"))
+_charsets.add(Charset(89, "tis620", "tis620_bin"))
+_charsets.add(Charset(91, "ujis", "ujis_bin"))
+_charsets.add(Charset(92, "geostd8", "geostd8_general_ci", True))
+_charsets.add(Charset(93, "geostd8", "geostd8_bin"))
+_charsets.add(Charset(94, "latin1", "latin1_spanish_ci"))
+_charsets.add(Charset(95, "cp932", "cp932_japanese_ci", True))
+_charsets.add(Charset(96, "cp932", "cp932_bin"))
+_charsets.add(Charset(97, "eucjpms", "eucjpms_japanese_ci", True))
+_charsets.add(Charset(98, "eucjpms", "eucjpms_bin"))
+_charsets.add(Charset(99, "cp1250", "cp1250_polish_ci"))
+_charsets.add(Charset(192, "utf8mb3", "utf8mb3_unicode_ci"))
+_charsets.add(Charset(193, "utf8mb3", "utf8mb3_icelandic_ci"))
+_charsets.add(Charset(194, "utf8mb3", "utf8mb3_latvian_ci"))
+_charsets.add(Charset(195, "utf8mb3", "utf8mb3_romanian_ci"))
+_charsets.add(Charset(196, "utf8mb3", "utf8mb3_slovenian_ci"))
+_charsets.add(Charset(197, "utf8mb3", "utf8mb3_polish_ci"))
+_charsets.add(Charset(198, "utf8mb3", "utf8mb3_estonian_ci"))
+_charsets.add(Charset(199, "utf8mb3", "utf8mb3_spanish_ci"))
+_charsets.add(Charset(200, "utf8mb3", "utf8mb3_swedish_ci"))
+_charsets.add(Charset(201, "utf8mb3", "utf8mb3_turkish_ci"))
+_charsets.add(Charset(202, "utf8mb3", "utf8mb3_czech_ci"))
+_charsets.add(Charset(203, "utf8mb3", "utf8mb3_danish_ci"))
+_charsets.add(Charset(204, "utf8mb3", "utf8mb3_lithuanian_ci"))
+_charsets.add(Charset(205, "utf8mb3", "utf8mb3_slovak_ci"))
+_charsets.add(Charset(206, "utf8mb3", "utf8mb3_spanish2_ci"))
+_charsets.add(Charset(207, "utf8mb3", "utf8mb3_roman_ci"))
+_charsets.add(Charset(208, "utf8mb3", "utf8mb3_persian_ci"))
+_charsets.add(Charset(209, "utf8mb3", "utf8mb3_esperanto_ci"))
+_charsets.add(Charset(210, "utf8mb3", "utf8mb3_hungarian_ci"))
+_charsets.add(Charset(211, "utf8mb3", "utf8mb3_sinhala_ci"))
+_charsets.add(Charset(212, "utf8mb3", "utf8mb3_german2_ci"))
+_charsets.add(Charset(213, "utf8mb3", "utf8mb3_croatian_ci"))
+_charsets.add(Charset(214, "utf8mb3", "utf8mb3_unicode_520_ci"))
+_charsets.add(Charset(215, "utf8mb3", "utf8mb3_vietnamese_ci"))
+_charsets.add(Charset(223, "utf8mb3", "utf8mb3_general_mysql500_ci"))
+_charsets.add(Charset(224, "utf8mb4", "utf8mb4_unicode_ci"))
+_charsets.add(Charset(225, "utf8mb4", "utf8mb4_icelandic_ci"))
+_charsets.add(Charset(226, "utf8mb4", "utf8mb4_latvian_ci"))
+_charsets.add(Charset(227, "utf8mb4", "utf8mb4_romanian_ci"))
+_charsets.add(Charset(228, "utf8mb4", "utf8mb4_slovenian_ci"))
+_charsets.add(Charset(229, "utf8mb4", "utf8mb4_polish_ci"))
+_charsets.add(Charset(230, "utf8mb4", "utf8mb4_estonian_ci"))
+_charsets.add(Charset(231, "utf8mb4", "utf8mb4_spanish_ci"))
+_charsets.add(Charset(232, "utf8mb4", "utf8mb4_swedish_ci"))
+_charsets.add(Charset(233, "utf8mb4", "utf8mb4_turkish_ci"))
+_charsets.add(Charset(234, "utf8mb4", "utf8mb4_czech_ci"))
+_charsets.add(Charset(235, "utf8mb4", "utf8mb4_danish_ci"))
+_charsets.add(Charset(236, "utf8mb4", "utf8mb4_lithuanian_ci"))
+_charsets.add(Charset(237, "utf8mb4", "utf8mb4_slovak_ci"))
+_charsets.add(Charset(238, "utf8mb4", "utf8mb4_spanish2_ci"))
+_charsets.add(Charset(239, "utf8mb4", "utf8mb4_roman_ci"))
+_charsets.add(Charset(240, "utf8mb4", "utf8mb4_persian_ci"))
+_charsets.add(Charset(241, "utf8mb4", "utf8mb4_esperanto_ci"))
+_charsets.add(Charset(242, "utf8mb4", "utf8mb4_hungarian_ci"))
+_charsets.add(Charset(243, "utf8mb4", "utf8mb4_sinhala_ci"))
+_charsets.add(Charset(244, "utf8mb4", "utf8mb4_german2_ci"))
+_charsets.add(Charset(245, "utf8mb4", "utf8mb4_croatian_ci"))
+_charsets.add(Charset(246, "utf8mb4", "utf8mb4_unicode_520_ci"))
+_charsets.add(Charset(247, "utf8mb4", "utf8mb4_vietnamese_ci"))
+_charsets.add(Charset(248, "gb18030", "gb18030_chinese_ci", True))
+_charsets.add(Charset(249, "gb18030", "gb18030_bin"))
+_charsets.add(Charset(250, "gb18030", "gb18030_unicode_520_ci"))
+_charsets.add(Charset(255, "utf8mb4", "utf8mb4_0900_ai_ci"))
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/connections.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/connections.py"
new file mode 100644
index 0000000..99fcfcd
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/connections.py"
@@ -0,0 +1,1435 @@
+# Python implementation of the MySQL client-server protocol
+# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
+# Error codes:
+# https://dev.mysql.com/doc/refman/5.5/en/error-handling.html
+import errno
+import os
+import socket
+import struct
+import sys
+import traceback
+import warnings
+
+from . import _auth
+
+from .charset import charset_by_name, charset_by_id
+from .constants import CLIENT, COMMAND, CR, ER, FIELD_TYPE, SERVER_STATUS
+from . import converters
+from .cursors import Cursor
+from .optionfile import Parser
+from .protocol import (
+ dump_packet,
+ MysqlPacket,
+ FieldDescriptorPacket,
+ OKPacketWrapper,
+ EOFPacketWrapper,
+ LoadLocalPacketWrapper,
+)
+from . import err, VERSION_STRING
+
+try:
+ import ssl
+
+ SSL_ENABLED = True
+except ImportError:
+ ssl = None
+ SSL_ENABLED = False
+
+try:
+ import getpass
+
+ DEFAULT_USER = getpass.getuser()
+ del getpass
+except (ImportError, KeyError, OSError):
+ # When there's no entry in OS database for a current user:
+ # KeyError is raised in Python 3.12 and below.
+ # OSError is raised in Python 3.13+
+ DEFAULT_USER = None
+
+DEBUG = False
+_DEFAULT_AUTH_PLUGIN = None # if this is not None, use it instead of server's default.
+
+TEXT_TYPES = {
+ FIELD_TYPE.BIT,
+ FIELD_TYPE.BLOB,
+ FIELD_TYPE.LONG_BLOB,
+ FIELD_TYPE.MEDIUM_BLOB,
+ FIELD_TYPE.STRING,
+ FIELD_TYPE.TINY_BLOB,
+ FIELD_TYPE.VAR_STRING,
+ FIELD_TYPE.VARCHAR,
+ FIELD_TYPE.GEOMETRY,
+}
+
+
+DEFAULT_CHARSET = "utf8mb4"
+
+MAX_PACKET_LEN = 2**24 - 1
+
+
+def _pack_int24(n):
+ return struct.pack("<I", n)[:3]
+
+
+# https://dev.mysql.com/doc/internals/en/integer.html#packet-Protocol::LengthEncodedInteger
+def _lenenc_int(i):
+ if i < 0:
+ raise ValueError(
+ "Encoding %d is less than 0 - no representation in LengthEncodedInteger" % i
+ )
+ elif i < 0xFB:
+ return bytes([i])
+ elif i < (1 << 16):
+ return b"\xfc" + struct.pack("<H", i)
+ elif i < (1 << 24):
+ return b"\xfd" + struct.pack("<I", i)[:3]
+ elif i < (1 << 64):
+ return b"\xfe" + struct.pack("<Q", i)
+ else:
+ raise ValueError(
+ f"Encoding {i:x} is larger than {1 << 64:x} - no representation in LengthEncodedInteger"
+ )
+
+
+class Connection:
+ """
+ Representation of a socket with a mysql server.
+
+ The proper way to get an instance of this class is to call
+ connect().
+
+ Establish a connection to the MySQL database. Accepts several
+ arguments:
+
+ :param host: Host where the database server is located.
+ :param user: Username to log in as.
+ :param password: Password to use.
+ :param database: Database to use, None to not use a particular one.
+ :param port: MySQL port to use, default is usually OK. (default: 3306)
+ :param bind_address: When the client has multiple network interfaces, specify
+ the interface from which to connect to the host. Argument can be
+ a hostname or an IP address.
+ :param unix_socket: Use a unix socket rather than TCP/IP.
+ :param read_timeout: The timeout for reading from the connection in seconds.
+ (default: None - no timeout)
+ :param write_timeout: The timeout for writing to the connection in seconds.
+ (default: None - no timeout)
+ :param str charset: Charset to use.
+ :param str collation: Collation name to use.
+ :param sql_mode: Default SQL_MODE to use.
+ :param read_default_file:
+ Specifies my.cnf file to read these parameters from under the [client] section.
+ :param conv:
+ Conversion dictionary to use instead of the default one.
+ This is used to provide custom marshalling and unmarshalling of types.
+ See converters.
+ :param use_unicode:
+ Whether or not to default to unicode strings.
+ This option defaults to true.
+ :param client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT.
+ :param cursorclass: Custom cursor class to use.
+ :param init_command: Initial SQL statement to run when connection is established.
+ :param connect_timeout: The timeout for connecting to the database in seconds.
+ (default: 10, min: 1, max: 31536000)
+ :param ssl: A dict of arguments similar to mysql_ssl_set()'s parameters or an ssl.SSLContext.
+ :param ssl_ca: Path to the file that contains a PEM-formatted CA certificate.
+ :param ssl_cert: Path to the file that contains a PEM-formatted client certificate.
+ :param ssl_disabled: A boolean value that disables usage of TLS.
+ :param ssl_key: Path to the file that contains a PEM-formatted private key for
+ the client certificate.
+ :param ssl_key_password: The password for the client certificate private key.
+ :param ssl_verify_cert: Set to true to check the server certificate's validity.
+ :param ssl_verify_identity: Set to true to check the server's identity.
+ :param read_default_group: Group to read from in the configuration file.
+ :param autocommit: Autocommit mode. None means use server default. (default: False)
+ :param local_infile: Boolean to enable the use of LOAD DATA LOCAL command. (default: False)
+ :param max_allowed_packet: Max size of packet sent to server in bytes. (default: 16MB)
+ Only used to limit size of "LOAD LOCAL INFILE" data packet smaller than default (16KB).
+ :param defer_connect: Don't explicitly connect on construction - wait for connect call.
+ (default: False)
+ :param auth_plugin_map: A dict of plugin names to a class that processes that plugin.
+ The class will take the Connection object as the argument to the constructor.
+ The class needs an authenticate method taking an authentication packet as
+ an argument. For the dialog plugin, a prompt(echo, prompt) method can be used
+ (if no authenticate method) for returning a string from the user. (experimental)
+ :param server_public_key: SHA256 authentication plugin public key value. (default: None)
+ :param binary_prefix: Add _binary prefix on bytes and bytearray. (default: False)
+ :param compress: Not supported.
+ :param named_pipe: Not supported.
+ :param db: **DEPRECATED** Alias for database.
+ :param passwd: **DEPRECATED** Alias for password.
+
+ See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_ in the
+ specification.
+ """
+
+ _sock = None
+ _rfile = None
+ _auth_plugin_name = ""
+ _closed = False
+ _secure = False
+
+ def __init__(
+ self,
+ *,
+ user=None, # The first four arguments is based on DB-API 2.0 recommendation.
+ password="",
+ host=None,
+ database=None,
+ unix_socket=None,
+ port=0,
+ charset="",
+ collation=None,
+ sql_mode=None,
+ read_default_file=None,
+ conv=None,
+ use_unicode=True,
+ client_flag=0,
+ cursorclass=Cursor,
+ init_command=None,
+ connect_timeout=10,
+ read_default_group=None,
+ autocommit=False,
+ local_infile=False,
+ max_allowed_packet=16 * 1024 * 1024,
+ defer_connect=False,
+ auth_plugin_map=None,
+ read_timeout=None,
+ write_timeout=None,
+ bind_address=None,
+ binary_prefix=False,
+ program_name=None,
+ server_public_key=None,
+ ssl=None,
+ ssl_ca=None,
+ ssl_cert=None,
+ ssl_disabled=None,
+ ssl_key=None,
+ ssl_key_password=None,
+ ssl_verify_cert=None,
+ ssl_verify_identity=None,
+ compress=None, # not supported
+ named_pipe=None, # not supported
+ passwd=None, # deprecated
+ db=None, # deprecated
+ ):
+ if db is not None and database is None:
+ # We will raise warning in 2022 or later.
+ # See https://github.com/PyMySQL/PyMySQL/issues/939
+ # warnings.warn("'db' is deprecated, use 'database'", DeprecationWarning, 3)
+ database = db
+ if passwd is not None and not password:
+ # We will raise warning in 2022 or later.
+ # See https://github.com/PyMySQL/PyMySQL/issues/939
+ # warnings.warn(
+ # "'passwd' is deprecated, use 'password'", DeprecationWarning, 3
+ # )
+ password = passwd
+
+ if compress or named_pipe:
+ raise NotImplementedError(
+ "compress and named_pipe arguments are not supported"
+ )
+
+ self._local_infile = bool(local_infile)
+ if self._local_infile:
+ client_flag |= CLIENT.LOCAL_FILES
+
+ if read_default_group and not read_default_file:
+ if sys.platform.startswith("win"):
+ read_default_file = "c:\\my.ini"
+ else:
+ read_default_file = "/etc/my.cnf"
+
+ if read_default_file:
+ if not read_default_group:
+ read_default_group = "client"
+
+ cfg = Parser()
+ cfg.read(os.path.expanduser(read_default_file))
+
+ def _config(key, arg):
+ if arg:
+ return arg
+ try:
+ return cfg.get(read_default_group, key)
+ except Exception:
+ return arg
+
+ user = _config("user", user)
+ password = _config("password", password)
+ host = _config("host", host)
+ database = _config("database", database)
+ unix_socket = _config("socket", unix_socket)
+ port = int(_config("port", port))
+ bind_address = _config("bind-address", bind_address)
+ charset = _config("default-character-set", charset)
+ if not ssl:
+ ssl = {}
+ if isinstance(ssl, dict):
+ for key in ["ca", "capath", "cert", "key", "password", "cipher"]:
+ value = _config("ssl-" + key, ssl.get(key))
+ if value:
+ ssl[key] = value
+
+ self.ssl = False
+ if not ssl_disabled:
+ if ssl_ca or ssl_cert or ssl_key or ssl_verify_cert or ssl_verify_identity:
+ ssl = {
+ "ca": ssl_ca,
+ "check_hostname": bool(ssl_verify_identity),
+ "verify_mode": ssl_verify_cert
+ if ssl_verify_cert is not None
+ else False,
+ }
+ if ssl_cert is not None:
+ ssl["cert"] = ssl_cert
+ if ssl_key is not None:
+ ssl["key"] = ssl_key
+ if ssl_key_password is not None:
+ ssl["password"] = ssl_key_password
+ if ssl:
+ if not SSL_ENABLED:
+ raise NotImplementedError("ssl module not found")
+ self.ssl = True
+ client_flag |= CLIENT.SSL
+ self.ctx = self._create_ssl_ctx(ssl)
+
+ self.host = host or "localhost"
+ self.port = port or 3306
+ if type(self.port) is not int:
+ raise ValueError("port should be of type int")
+ self.user = user or DEFAULT_USER
+ self.password = password or b""
+ if isinstance(self.password, str):
+ self.password = self.password.encode("latin1")
+ self.db = database
+ self.unix_socket = unix_socket
+ self.bind_address = bind_address
+ if not (0 < connect_timeout <= 31536000):
+ raise ValueError("connect_timeout should be >0 and <=31536000")
+ self.connect_timeout = connect_timeout or None
+ if read_timeout is not None and read_timeout <= 0:
+ raise ValueError("read_timeout should be > 0")
+ self._read_timeout = read_timeout
+ if write_timeout is not None and write_timeout <= 0:
+ raise ValueError("write_timeout should be > 0")
+ self._write_timeout = write_timeout
+
+ self.charset = charset or DEFAULT_CHARSET
+ self.collation = collation
+ self.use_unicode = use_unicode
+
+ self.encoding = charset_by_name(self.charset).encoding
+
+ client_flag |= CLIENT.CAPABILITIES
+ if self.db:
+ client_flag |= CLIENT.CONNECT_WITH_DB
+
+ self.client_flag = client_flag
+
+ self.cursorclass = cursorclass
+
+ self._result = None
+ self._affected_rows = 0
+ self.host_info = "Not connected"
+
+ # specified autocommit mode. None means use server default.
+ self.autocommit_mode = autocommit
+
+ if conv is None:
+ conv = converters.conversions
+
+ # Need for MySQLdb compatibility.
+ self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
+ self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
+ self.sql_mode = sql_mode
+ self.init_command = init_command
+ self.max_allowed_packet = max_allowed_packet
+ self._auth_plugin_map = auth_plugin_map or {}
+ self._binary_prefix = binary_prefix
+ self.server_public_key = server_public_key
+
+ self._connect_attrs = {
+ "_client_name": "pymysql",
+ "_client_version": VERSION_STRING,
+ "_pid": str(os.getpid()),
+ }
+
+ if program_name:
+ self._connect_attrs["program_name"] = program_name
+
+ if defer_connect:
+ self._sock = None
+ else:
+ self.connect()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ del exc_info
+ self.close()
+
+ def _create_ssl_ctx(self, sslp):
+ if isinstance(sslp, ssl.SSLContext):
+ return sslp
+ ca = sslp.get("ca")
+ capath = sslp.get("capath")
+ hasnoca = ca is None and capath is None
+ ctx = ssl.create_default_context(cafile=ca, capath=capath)
+
+ # Python 3.13 enables VERIFY_X509_STRICT by default.
+ # But self signed certificates that are generated by MySQL automatically
+ # doesn't pass the verification.
+ ctx.verify_flags &= ~ssl.VERIFY_X509_STRICT
+
+ ctx.check_hostname = not hasnoca and sslp.get("check_hostname", True)
+ verify_mode_value = sslp.get("verify_mode")
+ if verify_mode_value is None:
+ ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
+ elif isinstance(verify_mode_value, bool):
+ ctx.verify_mode = ssl.CERT_REQUIRED if verify_mode_value else ssl.CERT_NONE
+ else:
+ if isinstance(verify_mode_value, str):
+ verify_mode_value = verify_mode_value.lower()
+ if verify_mode_value in ("none", "0", "false", "no"):
+ ctx.verify_mode = ssl.CERT_NONE
+ elif verify_mode_value == "optional":
+ ctx.verify_mode = ssl.CERT_OPTIONAL
+ elif verify_mode_value in ("required", "1", "true", "yes"):
+ ctx.verify_mode = ssl.CERT_REQUIRED
+ else:
+ ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
+ if "cert" in sslp:
+ ctx.load_cert_chain(
+ sslp["cert"], keyfile=sslp.get("key"), password=sslp.get("password")
+ )
+ if "cipher" in sslp:
+ ctx.set_ciphers(sslp["cipher"])
+ ctx.options |= ssl.OP_NO_SSLv2
+ ctx.options |= ssl.OP_NO_SSLv3
+ return ctx
+
+ def close(self):
+ """
+ Send the quit message and close the socket.
+
+ See `Connection.close() <https://www.python.org/dev/peps/pep-0249/#Connection.close>`_
+ in the specification.
+
+ :raise Error: If the connection is already closed.
+ """
+ if self._closed:
+ raise err.Error("Already closed")
+ self._closed = True
+ if self._sock is None:
+ return
+ send_data = struct.pack("<iB", 1, COMMAND.COM_QUIT)
+ try:
+ self._write_bytes(send_data)
+ except Exception:
+ pass
+ finally:
+ self._force_close()
+
+ @property
+ def open(self):
+ """Return True if the connection is open."""
+ return self._sock is not None
+
+ def _force_close(self):
+ """Close connection without QUIT message."""
+ if self._rfile:
+ self._rfile.close()
+ if self._sock:
+ try:
+ self._sock.close()
+ except: # noqa
+ pass
+ self._sock = None
+ self._rfile = None
+
+ __del__ = _force_close
+
+ def autocommit(self, value):
+ self.autocommit_mode = bool(value)
+ current = self.get_autocommit()
+ if value != current:
+ self._send_autocommit_mode()
+
+ def get_autocommit(self):
+ return bool(self.server_status & SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT)
+
+ def _read_ok_packet(self):
+ pkt = self._read_packet()
+ if not pkt.is_ok_packet():
+ raise err.OperationalError(
+ CR.CR_COMMANDS_OUT_OF_SYNC,
+ "Command Out of Sync",
+ )
+ ok = OKPacketWrapper(pkt)
+ self.server_status = ok.server_status
+ return ok
+
+ def _send_autocommit_mode(self):
+ """Set whether or not to commit after every execute()."""
+ self._execute_command(
+ COMMAND.COM_QUERY, "SET AUTOCOMMIT = %s" % self.escape(self.autocommit_mode)
+ )
+ self._read_ok_packet()
+
+ def begin(self):
+ """Begin transaction."""
+ self._execute_command(COMMAND.COM_QUERY, "BEGIN")
+ self._read_ok_packet()
+
+ def commit(self):
+ """
+ Commit changes to stable storage.
+
+ See `Connection.commit() <https://www.python.org/dev/peps/pep-0249/#commit>`_
+ in the specification.
+ """
+ self._execute_command(COMMAND.COM_QUERY, "COMMIT")
+ self._read_ok_packet()
+
+ def rollback(self):
+ """
+ Roll back the current transaction.
+
+ See `Connection.rollback() <https://www.python.org/dev/peps/pep-0249/#rollback>`_
+ in the specification.
+ """
+ self._execute_command(COMMAND.COM_QUERY, "ROLLBACK")
+ self._read_ok_packet()
+
+ def show_warnings(self):
+ """Send the "SHOW WARNINGS" SQL command."""
+ self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
+ result = MySQLResult(self)
+ result.read()
+ return result.rows
+
+ def select_db(self, db):
+ """
+ Set current db.
+
+ :param db: The name of the db.
+ """
+ self._execute_command(COMMAND.COM_INIT_DB, db)
+ self._read_ok_packet()
+
+ def escape(self, obj, mapping=None):
+ """Escape whatever value is passed.
+
+ Non-standard, for internal use; do not use this in your applications.
+ """
+ if isinstance(obj, str):
+ return "'" + self.escape_string(obj) + "'"
+ if isinstance(obj, (bytes, bytearray)):
+ ret = self._quote_bytes(obj)
+ if self._binary_prefix:
+ ret = "_binary" + ret
+ return ret
+ return converters.escape_item(obj, self.charset, mapping=mapping)
+
+ def literal(self, obj):
+ """Alias for escape().
+
+ Non-standard, for internal use; do not use this in your applications.
+ """
+ return self.escape(obj, self.encoders)
+
+ def escape_string(self, s):
+ if self.server_status & SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES:
+ return s.replace("'", "''")
+ return converters.escape_string(s)
+
+ def _quote_bytes(self, s):
+ if self.server_status & SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES:
+ return "'{}'".format(
+ s.replace(b"'", b"''").decode("ascii", "surrogateescape")
+ )
+ return converters.escape_bytes(s)
+
+ def cursor(self, cursor=None):
+ """
+ Create a new cursor to execute queries with.
+
+ :param cursor: The type of cursor to create. None means use Cursor.
+ :type cursor: :py:class:`Cursor`, :py:class:`SSCursor`, :py:class:`DictCursor`,
+ or :py:class:`SSDictCursor`.
+ """
+ if cursor:
+ return cursor(self)
+ return self.cursorclass(self)
+
+ # The following methods are INTERNAL USE ONLY (called from Cursor)
+ def query(self, sql, unbuffered=False):
+ # if DEBUG:
+ # print("DEBUG: sending query:", sql)
+ if isinstance(sql, str):
+ sql = sql.encode(self.encoding, "surrogateescape")
+ self._execute_command(COMMAND.COM_QUERY, sql)
+ self._affected_rows = self._read_query_result(unbuffered=unbuffered)
+ return self._affected_rows
+
+ def next_result(self, unbuffered=False):
+ self._affected_rows = self._read_query_result(unbuffered=unbuffered)
+ return self._affected_rows
+
+ def affected_rows(self):
+ return self._affected_rows
+
+ def kill(self, thread_id):
+ if not isinstance(thread_id, int):
+ raise TypeError("thread_id must be an integer")
+ self.query(f"KILL {thread_id:d}")
+
+ def ping(self, reconnect=True):
+ """
+ Check if the server is alive.
+
+ :param reconnect: If the connection is closed, reconnect.
+ :type reconnect: boolean
+
+ :raise Error: If the connection is closed and reconnect=False.
+ """
+ if self._sock is None:
+ if reconnect:
+ self.connect()
+ reconnect = False
+ else:
+ raise err.Error("Already closed")
+ try:
+ self._execute_command(COMMAND.COM_PING, "")
+ self._read_ok_packet()
+ except Exception:
+ if reconnect:
+ self.connect()
+ self.ping(False)
+ else:
+ raise
+
+ def set_charset(self, charset):
+ """Deprecated. Use set_character_set() instead."""
+ # This function has been implemented in old PyMySQL.
+ # But this name is different from MySQLdb.
+ # So we keep this function for compatibility and add
+ # new set_character_set() function.
+ self.set_character_set(charset)
+
+ def set_character_set(self, charset, collation=None):
+ """
+ Set charaset (and collation)
+
+ Send "SET NAMES charset [COLLATE collation]" query.
+ Update Connection.encoding based on charset.
+ """
+ # Make sure charset is supported.
+ encoding = charset_by_name(charset).encoding
+
+ if collation:
+ query = f"SET NAMES {charset} COLLATE {collation}"
+ else:
+ query = f"SET NAMES {charset}"
+ self._execute_command(COMMAND.COM_QUERY, query)
+ self._read_packet()
+ self.charset = charset
+ self.encoding = encoding
+ self.collation = collation
+
+ def connect(self, sock=None):
+ self._closed = False
+ try:
+ if sock is None:
+ if self.unix_socket:
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.settimeout(self.connect_timeout)
+ sock.connect(self.unix_socket)
+ self.host_info = "Localhost via UNIX socket"
+ self._secure = True
+ if DEBUG:
+ print("connected using unix_socket")
+ else:
+ kwargs = {}
+ if self.bind_address is not None:
+ kwargs["source_address"] = (self.bind_address, 0)
+ while True:
+ try:
+ sock = socket.create_connection(
+ (self.host, self.port), self.connect_timeout, **kwargs
+ )
+ break
+ except OSError as e:
+ if e.errno == errno.EINTR:
+ continue
+ raise
+ self.host_info = "socket %s:%d" % (self.host, self.port)
+ if DEBUG:
+ print("connected using socket")
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+ sock.settimeout(None)
+
+ self._sock = sock
+ self._rfile = sock.makefile("rb")
+ self._next_seq_id = 0
+
+ self._get_server_information()
+ self._request_authentication()
+
+ # Send "SET NAMES" query on init for:
+ # - Ensure charaset (and collation) is set to the server.
+ # - collation_id in handshake packet may be ignored.
+ # - If collation is not specified, we don't know what is server's
+ # default collation for the charset. For example, default collation
+ # of utf8mb4 is:
+ # - MySQL 5.7, MariaDB 10.x: utf8mb4_general_ci
+ # - MySQL 8.0: utf8mb4_0900_ai_ci
+ #
+ # Reference:
+ # - https://github.com/PyMySQL/PyMySQL/issues/1092
+ # - https://github.com/wagtail/wagtail/issues/9477
+ # - https://zenn.dev/methane/articles/2023-mysql-collation (Japanese)
+ self.set_character_set(self.charset, self.collation)
+
+ if self.sql_mode is not None:
+ c = self.cursor()
+ c.execute("SET sql_mode=%s", (self.sql_mode,))
+ c.close()
+
+ if self.init_command is not None:
+ c = self.cursor()
+ c.execute(self.init_command)
+ c.close()
+
+ if self.autocommit_mode is not None:
+ self.autocommit(self.autocommit_mode)
+ except BaseException as e:
+ self._force_close()
+
+ if isinstance(e, (OSError, IOError)):
+ exc = err.OperationalError(
+ CR.CR_CONN_HOST_ERROR,
+ f"Can't connect to MySQL server on {self.host!r} ({e})",
+ )
+ # Keep original exception and traceback to investigate error.
+ exc.original_exception = e
+ exc.traceback = traceback.format_exc()
+ if DEBUG:
+ print(exc.traceback)
+ raise exc
+
+ # If e is neither DatabaseError or IOError, It's a bug.
+ # But raising AssertionError hides original error.
+ # So just reraise it.
+ raise
+
+ def write_packet(self, payload):
+ """Writes an entire "mysql packet" in its entirety to the network
+ adding its length and sequence number.
+ """
+ # Internal note: when you build packet manually and calls _write_bytes()
+ # directly, you should set self._next_seq_id properly.
+ data = _pack_int24(len(payload)) + bytes([self._next_seq_id]) + payload
+ if DEBUG:
+ dump_packet(data)
+ self._write_bytes(data)
+ self._next_seq_id = (self._next_seq_id + 1) % 256
+
+ def _read_packet(self, packet_type=MysqlPacket):
+ """Read an entire "mysql packet" in its entirety from the network
+ and return a MysqlPacket type that represents the results.
+
+ :raise OperationalError: If the connection to the MySQL server is lost.
+ :raise InternalError: If the packet sequence number is wrong.
+ """
+ buff = bytearray()
+ while True:
+ packet_header = self._read_bytes(4)
+ # if DEBUG: dump_packet(packet_header)
+
+ btrl, btrh, packet_number = struct.unpack("<HBB", packet_header)
+ bytes_to_read = btrl + (btrh << 16)
+ if packet_number != self._next_seq_id:
+ self._force_close()
+ if packet_number == 0:
+ # MariaDB sends error packet with seqno==0 when shutdown
+ raise err.OperationalError(
+ CR.CR_SERVER_LOST,
+ "Lost connection to MySQL server during query",
+ )
+ raise err.InternalError(
+ "Packet sequence number wrong - got %d expected %d"
+ % (packet_number, self._next_seq_id)
+ )
+ self._next_seq_id = (self._next_seq_id + 1) % 256
+
+ recv_data = self._read_bytes(bytes_to_read)
+ if DEBUG:
+ dump_packet(recv_data)
+ buff += recv_data
+ # https://dev.mysql.com/doc/internals/en/sending-more-than-16mbyte.html
+ if bytes_to_read < MAX_PACKET_LEN:
+ break
+
+ packet = packet_type(bytes(buff), self.encoding)
+ if packet.is_error_packet():
+ if self._result is not None and self._result.unbuffered_active is True:
+ self._result.unbuffered_active = False
+ packet.raise_for_error()
+ return packet
+
+ def _read_bytes(self, num_bytes):
+ self._sock.settimeout(self._read_timeout)
+ while True:
+ try:
+ data = self._rfile.read(num_bytes)
+ break
+ except OSError as e:
+ if e.errno == errno.EINTR:
+ continue
+ self._force_close()
+ raise err.OperationalError(
+ CR.CR_SERVER_LOST,
+ f"Lost connection to MySQL server during query ({e})",
+ )
+ except BaseException:
+ # Don't convert unknown exception to MySQLError.
+ self._force_close()
+ raise
+ if len(data) < num_bytes:
+ self._force_close()
+ raise err.OperationalError(
+ CR.CR_SERVER_LOST, "Lost connection to MySQL server during query"
+ )
+ return data
+
+ def _write_bytes(self, data):
+ self._sock.settimeout(self._write_timeout)
+ try:
+ self._sock.sendall(data)
+ except OSError as e:
+ self._force_close()
+ raise err.OperationalError(
+ CR.CR_SERVER_GONE_ERROR, f"MySQL server has gone away ({e!r})"
+ )
+
+ def _read_query_result(self, unbuffered=False):
+ self._result = None
+ result = MySQLResult(self)
+ if unbuffered:
+ result.init_unbuffered_query()
+ else:
+ result.read()
+ self._result = result
+ if result.server_status is not None:
+ self.server_status = result.server_status
+ return result.affected_rows
+
+ def insert_id(self):
+ if self._result:
+ return self._result.insert_id
+ else:
+ return 0
+
+ def _execute_command(self, command, sql):
+ """
+ :raise InterfaceError: If the connection is closed.
+ :raise ValueError: If no username was specified.
+ """
+ if not self._sock:
+ raise err.InterfaceError(0, "")
+
+ # If the last query was unbuffered, make sure it finishes before
+ # sending new commands
+ if self._result is not None:
+ if self._result.unbuffered_active:
+ warnings.warn("Previous unbuffered result was left incomplete")
+ self._result._finish_unbuffered_query()
+ while self._result.has_next:
+ self.next_result()
+ self._result = None
+
+ if isinstance(sql, str):
+ sql = sql.encode(self.encoding)
+
+ packet_size = min(MAX_PACKET_LEN, len(sql) + 1) # +1 is for command
+
+ # tiny optimization: build first packet manually instead of
+ # calling self..write_packet()
+ prelude = struct.pack("<iB", packet_size, command)
+ packet = prelude + sql[: packet_size - 1]
+ self._write_bytes(packet)
+ if DEBUG:
+ dump_packet(packet)
+ self._next_seq_id = 1
+
+ if packet_size < MAX_PACKET_LEN:
+ return
+
+ sql = sql[packet_size - 1 :]
+ while True:
+ packet_size = min(MAX_PACKET_LEN, len(sql))
+ self.write_packet(sql[:packet_size])
+ sql = sql[packet_size:]
+ if not sql and packet_size < MAX_PACKET_LEN:
+ break
+
+ def _request_authentication(self):
+ # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::HandshakeResponse
+ if int(self.server_version.split(".", 1)[0]) >= 5:
+ self.client_flag |= CLIENT.MULTI_RESULTS
+
+ if self.user is None:
+ raise ValueError("Did not specify a username")
+
+ charset_id = charset_by_name(self.charset).id
+ if isinstance(self.user, str):
+ self.user = self.user.encode(self.encoding)
+
+ data_init = struct.pack(
+ "<iIB23s", self.client_flag, MAX_PACKET_LEN, charset_id, b""
+ )
+
+ if self.ssl and self.server_capabilities & CLIENT.SSL:
+ self.write_packet(data_init)
+
+ self._sock = self.ctx.wrap_socket(self._sock, server_hostname=self.host)
+ self._rfile = self._sock.makefile("rb")
+ self._secure = True
+
+ data = data_init + self.user + b"\0"
+
+ authresp = b""
+ plugin_name = None
+
+ if self._auth_plugin_name == "":
+ plugin_name = b""
+ authresp = _auth.scramble_native_password(self.password, self.salt)
+ elif self._auth_plugin_name == "mysql_native_password":
+ plugin_name = b"mysql_native_password"
+ authresp = _auth.scramble_native_password(self.password, self.salt)
+ elif self._auth_plugin_name == "caching_sha2_password":
+ plugin_name = b"caching_sha2_password"
+ if self.password:
+ if DEBUG:
+ print("caching_sha2: trying fast path")
+ authresp = _auth.scramble_caching_sha2(self.password, self.salt)
+ else:
+ if DEBUG:
+ print("caching_sha2: empty password")
+ elif self._auth_plugin_name == "sha256_password":
+ plugin_name = b"sha256_password"
+ if self.ssl and self.server_capabilities & CLIENT.SSL:
+ authresp = self.password + b"\0"
+ elif self.password:
+ authresp = b"\1" # request public key
+ else:
+ authresp = b"\0" # empty password
+
+ if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA:
+ data += _lenenc_int(len(authresp)) + authresp
+ elif self.server_capabilities & CLIENT.SECURE_CONNECTION:
+ data += struct.pack("B", len(authresp)) + authresp
+ else: # pragma: no cover - not testing against servers without secure auth (>=5.0)
+ data += authresp + b"\0"
+
+ if self.db and self.server_capabilities & CLIENT.CONNECT_WITH_DB:
+ if isinstance(self.db, str):
+ self.db = self.db.encode(self.encoding)
+ data += self.db + b"\0"
+
+ if self.server_capabilities & CLIENT.PLUGIN_AUTH:
+ data += (plugin_name or b"") + b"\0"
+
+ if self.server_capabilities & CLIENT.CONNECT_ATTRS:
+ connect_attrs = b""
+ for k, v in self._connect_attrs.items():
+ k = k.encode("utf-8")
+ connect_attrs += _lenenc_int(len(k)) + k
+ v = v.encode("utf-8")
+ connect_attrs += _lenenc_int(len(v)) + v
+ data += _lenenc_int(len(connect_attrs)) + connect_attrs
+
+ self.write_packet(data)
+ auth_packet = self._read_packet()
+
+ # if authentication method isn't accepted the first byte
+ # will have the octet 254
+ if auth_packet.is_auth_switch_request():
+ if DEBUG:
+ print("received auth switch")
+ # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
+ auth_packet.read_uint8() # 0xfe packet identifier
+ plugin_name = auth_packet.read_string()
+ if (
+ self.server_capabilities & CLIENT.PLUGIN_AUTH
+ and plugin_name is not None
+ ):
+ auth_packet = self._process_auth(plugin_name, auth_packet)
+ else:
+ raise err.OperationalError("received unknown auth switch request")
+ elif auth_packet.is_extra_auth_data():
+ if DEBUG:
+ print("received extra data")
+ # https://dev.mysql.com/doc/internals/en/successful-authentication.html
+ if self._auth_plugin_name == "caching_sha2_password":
+ auth_packet = _auth.caching_sha2_password_auth(self, auth_packet)
+ elif self._auth_plugin_name == "sha256_password":
+ auth_packet = _auth.sha256_password_auth(self, auth_packet)
+ else:
+ raise err.OperationalError(
+ "Received extra packet for auth method %r", self._auth_plugin_name
+ )
+
+ if DEBUG:
+ print("Succeed to auth")
+
def _process_auth(self, plugin_name, auth_packet):
    """Drive one round of pluggable authentication with the server.

    :param plugin_name: plugin name announced by the server (bytes).
    :param auth_packet: the auth-switch / extra-auth packet payload.
    :return: the final packet of the exchange (an OK packet on success).
    :raise OperationalError: if the plugin is unknown, a user-supplied
        handler lacks the required methods, or it returns a bad response.
    """
    # A user-registered handler (from the connection's auth plugin map)
    # takes precedence over the built-in implementations below.
    handler = self._get_auth_plugin_handler(plugin_name)
    if handler:
        try:
            return handler.authenticate(auth_packet)
        except AttributeError:
            # Handlers without authenticate() are only tolerated for the
            # server-driven "dialog" plugin, which uses prompt() instead.
            if plugin_name != b"dialog":
                raise err.OperationalError(
                    CR.CR_AUTH_PLUGIN_CANNOT_LOAD,
                    f"Authentication plugin '{plugin_name}'"
                    f" not loaded: - {type(handler)!r} missing authenticate method",
                )
    # Built-in plugin implementations. The sha2/sha256 plugins run their
    # own multi-packet exchange; the others just compute one response.
    if plugin_name == b"caching_sha2_password":
        return _auth.caching_sha2_password_auth(self, auth_packet)
    elif plugin_name == b"sha256_password":
        return _auth.sha256_password_auth(self, auth_packet)
    elif plugin_name == b"mysql_native_password":
        data = _auth.scramble_native_password(self.password, auth_packet.read_all())
    elif plugin_name == b"client_ed25519":
        data = _auth.ed25519_password(self.password, auth_packet.read_all())
    elif plugin_name == b"mysql_old_password":
        data = (
            _auth.scramble_old_password(self.password, auth_packet.read_all())
            + b"\0"
        )
    elif plugin_name == b"mysql_clear_password":
        # https://dev.mysql.com/doc/internals/en/clear-text-authentication.html
        data = self.password + b"\0"
    elif plugin_name == b"dialog":
        # PAM-style dialog: answer server prompts until an OK packet
        # arrives or the server flags the last question.
        pkt = auth_packet
        while True:
            flag = pkt.read_uint8()
            echo = (flag & 0x06) == 0x02  # 0x02: echo the input back
            last = (flag & 0x01) == 0x01  # 0x01: last question in dialog
            prompt = pkt.read_all()

            if prompt == b"Password: ":
                self.write_packet(self.password + b"\0")
            elif handler:
                # Default resp so the TypeError message below is informative
                # even when prompt() itself raised before returning.
                resp = "no response - TypeError within plugin.prompt method"
                try:
                    resp = handler.prompt(echo, prompt)
                    self.write_packet(resp + b"\0")
                except AttributeError:
                    raise err.OperationalError(
                        CR.CR_AUTH_PLUGIN_CANNOT_LOAD,
                        f"Authentication plugin '{plugin_name}'"
                        f" not loaded: - {handler!r} missing prompt method",
                    )
                except TypeError:
                    raise err.OperationalError(
                        CR.CR_AUTH_PLUGIN_ERR,
                        f"Authentication plugin '{plugin_name}'"
                        f" {handler!r} didn't respond with string. Returned '{resp!r}' to prompt {prompt!r}",
                    )
            else:
                raise err.OperationalError(
                    CR.CR_AUTH_PLUGIN_CANNOT_LOAD,
                    f"Authentication plugin '{plugin_name}' not configured",
                )
            pkt = self._read_packet()
            pkt.check_error()
            if pkt.is_ok_packet() or last:
                break
        return pkt
    else:
        raise err.OperationalError(
            CR.CR_AUTH_PLUGIN_CANNOT_LOAD,
            "Authentication plugin '%s' not configured" % plugin_name,
        )

    # Single-response plugins fall through to here: send the computed
    # response and return the server's verdict packet.
    self.write_packet(data)
    pkt = self._read_packet()
    pkt.check_error()
    return pkt
+
+ def _get_auth_plugin_handler(self, plugin_name):
+ plugin_class = self._auth_plugin_map.get(plugin_name)
+ if not plugin_class and isinstance(plugin_name, bytes):
+ plugin_class = self._auth_plugin_map.get(plugin_name.decode("ascii"))
+ if plugin_class:
+ try:
+ handler = plugin_class(self)
+ except TypeError:
+ raise err.OperationalError(
+ CR.CR_AUTH_PLUGIN_CANNOT_LOAD,
+ f"Authentication plugin '{plugin_name}'"
+ f" not loaded: - {plugin_class!r} cannot be constructed with connection object",
+ )
+ else:
+ handler = None
+ return handler
+
+ # _mysql support
def thread_id(self):
    """Return the server-side thread id of this connection (_mysql compat)."""
    # server_thread_id is the 1-tuple produced by struct.unpack.
    server_thread_id = self.server_thread_id
    return server_thread_id[0]
+
def character_set_name(self):
    """Return the connection's character set name (_mysql compat)."""
    return self.charset
+
def get_host_info(self):
    """Return the human-readable host description (_mysql compat)."""
    return self.host_info
+
def get_proto_info(self):
    """Return the server protocol version number (_mysql compat)."""
    return self.protocol_version
+
def _get_server_information(self):
    """Parse the server's initial handshake (greeting) packet.

    Populates protocol_version, server_version, server_thread_id, salt,
    server_capabilities, server_language/server_charset, server_status
    and _auth_plugin_name from the Protocol::Handshake wire layout
    (see the URL referenced in the comment near the end of this method).
    """
    i = 0
    packet = self._read_packet()
    data = packet.get_all_data()

    # 1 byte: protocol version.
    self.protocol_version = data[i]
    i += 1

    # NUL-terminated server version string.
    server_end = data.find(b"\0", i)
    self.server_version = data[i:server_end].decode("latin1")
    i = server_end + 1

    # 4 bytes: connection/thread id. Kept as the 1-tuple from unpack;
    # thread_id() indexes element 0.
    self.server_thread_id = struct.unpack("<I", data[i : i + 4])
    i += 4

    # 8 bytes: first part of the auth plugin data (scramble).
    self.salt = data[i : i + 8]
    i += 9  # 8 + 1(filler)

    # 2 bytes: lower half of the capability flags.
    self.server_capabilities = struct.unpack("<H", data[i : i + 2])[0]
    i += 2

    if len(data) >= i + 6:
        lang, stat, cap_h, salt_len = struct.unpack("<BHHB", data[i : i + 6])
        i += 6
        # TODO: deprecate server_language and server_charset.
        # mysqlclient-python doesn't provide it.
        self.server_language = lang
        try:
            self.server_charset = charset_by_id(lang).name
        except KeyError:
            # unknown collation
            self.server_charset = None

        self.server_status = stat
        if DEBUG:
            print("server_status: %x" % stat)

        # Merge the upper half of the capability flags.
        self.server_capabilities |= cap_h << 16
        if DEBUG:
            print("salt_len:", salt_len)
        # Length of the second scramble part; never less than 12 bytes.
        salt_len = max(12, salt_len - 9)

    # reserved
    i += 10

    if len(data) >= i + salt_len:
        # salt_len includes auth_plugin_data_part_1 and filler
        self.salt += data[i : i + salt_len]
        i += salt_len

    i += 1
    # AUTH PLUGIN NAME may appear here.
    if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i:
        # Due to Bug#59453 the auth-plugin-name is missing the terminating
        # NUL-char in versions prior to 5.5.10 and 5.6.2.
        # ref: https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::Handshake
        # didn't use version checks as mariadb is corrected and reports
        # earlier than those two.
        server_end = data.find(b"\0", i)
        if server_end < 0:  # pragma: no cover - very specific upstream bug
            # not found \0 and last field so take it all
            self._auth_plugin_name = data[i:].decode("utf-8")
        else:
            self._auth_plugin_name = data[i:server_end].decode("utf-8")

    if _DEFAULT_AUTH_PLUGIN is not None:  # for tests
        self._auth_plugin_name = _DEFAULT_AUTH_PLUGIN
+
def get_server_info(self):
    """Return the server version string from the handshake (_mysql compat)."""
    return self.server_version
+
# DB-API 2.0 optional extension (PEP 249, "Optional DB API Extensions"):
# expose the module-level exception classes as attributes of the
# connection class too, so callers can catch e.g. conn.OperationalError.
Warning = err.Warning
Error = err.Error
InterfaceError = err.InterfaceError
DatabaseError = err.DatabaseError
DataError = err.DataError
OperationalError = err.OperationalError
IntegrityError = err.IntegrityError
InternalError = err.InternalError
ProgrammingError = err.ProgrammingError
NotSupportedError = err.NotSupportedError
+
+
class MySQLResult:
    """Reads and holds the server's response to a single statement.

    The first response packet decides the shape: an OK packet (no result
    set), a LOAD DATA LOCAL INFILE request, or a result set (column
    descriptors, then row data, terminated by an EOF packet).
    """

    def __init__(self, connection):
        """
        :type connection: Connection
        """
        self.connection = connection
        self.affected_rows = None
        self.insert_id = None
        self.server_status = None
        self.warning_count = 0
        self.message = None
        self.field_count = 0
        self.description = None
        self.rows = None
        self.has_next = None
        # True while a streaming (unbuffered) result set is still being read.
        self.unbuffered_active = False

    def __del__(self):
        # Drain any unread streaming rows so the underlying connection is
        # not left mid-result-set when this object is garbage collected.
        if self.unbuffered_active:
            self._finish_unbuffered_query()

    def read(self):
        """Read a complete (buffered) response for the previous command."""
        try:
            first_packet = self.connection._read_packet()

            if first_packet.is_ok_packet():
                self._read_ok_packet(first_packet)
            elif first_packet.is_load_local_packet():
                self._read_load_local_packet(first_packet)
            else:
                self._read_result_packet(first_packet)
        finally:
            # Drop the connection reference to break the cyclic reference.
            self.connection = None

    def init_unbuffered_query(self):
        """Start reading a result set in streaming (unbuffered) mode.

        :raise OperationalError: If the connection to the MySQL server is lost.
        :raise InternalError:
        """
        first_packet = self.connection._read_packet()

        if first_packet.is_ok_packet():
            self.connection = None
            self._read_ok_packet(first_packet)
        elif first_packet.is_load_local_packet():
            try:
                self._read_load_local_packet(first_packet)
            finally:
                self.connection = None
        else:
            self.field_count = first_packet.read_length_encoded_integer()
            self._get_descriptions()

            # Apparently, MySQLdb picks this number because it's the maximum
            # value of a 64bit unsigned integer. Since we're emulating MySQLdb,
            # we set it to this instead of None, which would be preferred.
            self.affected_rows = 18446744073709551615
            self.unbuffered_active = True

    def _read_ok_packet(self, first_packet):
        # Copy the OK packet's bookkeeping fields onto this result.
        ok_packet = OKPacketWrapper(first_packet)
        self.affected_rows = ok_packet.affected_rows
        self.insert_id = ok_packet.insert_id
        self.server_status = ok_packet.server_status
        self.warning_count = ok_packet.warning_count
        self.message = ok_packet.message
        self.has_next = ok_packet.has_next

    def _read_load_local_packet(self, first_packet):
        """Handle the server's request for a LOAD DATA LOCAL INFILE upload."""
        if not self.connection._local_infile:
            raise RuntimeError(
                "**WARN**: Received LOAD_LOCAL packet but local_infile option is false."
            )
        load_packet = LoadLocalPacketWrapper(first_packet)
        sender = LoadLocalFile(load_packet.filename, self.connection)
        try:
            sender.send_data()
        except:
            # Even on failure the server still sends a terminating packet;
            # consume it before re-raising so the stream stays in sync.
            self.connection._read_packet()  # skip ok packet
            raise

        ok_packet = self.connection._read_packet()
        if (
            not ok_packet.is_ok_packet()
        ):  # pragma: no cover - upstream induced protocol error
            raise err.OperationalError(
                CR.CR_COMMANDS_OUT_OF_SYNC,
                "Commands Out of Sync",
            )
        self._read_ok_packet(ok_packet)

    def _check_packet_is_eof(self, packet):
        """Return True (recording warnings/has_next) if *packet* is EOF."""
        if not packet.is_eof_packet():
            return False
        # TODO: Support CLIENT.DEPRECATE_EOF
        # 1) Add DEPRECATE_EOF to CAPABILITIES
        # 2) Mask CAPABILITIES with server_capabilities
        # 3) if server_capabilities & CLIENT.DEPRECATE_EOF:
        #    use OKPacketWrapper instead of EOFPacketWrapper
        wp = EOFPacketWrapper(packet)
        self.warning_count = wp.warning_count
        self.has_next = wp.has_next
        return True

    def _read_result_packet(self, first_packet):
        # Buffered mode: read the descriptors then every row eagerly.
        self.field_count = first_packet.read_length_encoded_integer()
        self._get_descriptions()
        self._read_rowdata_packet()

    def _read_rowdata_packet_unbuffered(self):
        """Read and return the next row in streaming mode (None at EOF)."""
        # Check if in an active query
        if not self.unbuffered_active:
            return

        # EOF
        packet = self.connection._read_packet()
        if self._check_packet_is_eof(packet):
            self.unbuffered_active = False
            self.connection = None
            self.rows = None
            return

        row = self._read_row_from_packet(packet)
        self.affected_rows = 1
        self.rows = (row,)  # rows should tuple of row for MySQL-python compatibility.
        return row

    def _finish_unbuffered_query(self):
        # After much reading on the MySQL protocol, it appears that there is,
        # in fact, no way to stop MySQL from sending all the data after
        # executing a query, so we just spin, and wait for an EOF packet.
        while self.unbuffered_active:
            try:
                packet = self.connection._read_packet()
            except err.OperationalError as e:
                if e.args[0] in (
                    ER.QUERY_TIMEOUT,
                    ER.STATEMENT_TIMEOUT,
                ):
                    # if the query timed out we can simply ignore this error
                    self.unbuffered_active = False
                    self.connection = None
                    return

                raise

            if self._check_packet_is_eof(packet):
                self.unbuffered_active = False
                self.connection = None  # release reference to kill cyclic reference.

    def _read_rowdata_packet(self):
        """Read a rowdata packet for each data row in the result set."""
        rows = []
        while True:
            packet = self.connection._read_packet()
            if self._check_packet_is_eof(packet):
                self.connection = None  # release reference to kill cyclic reference.
                break
            rows.append(self._read_row_from_packet(packet))

        self.affected_rows = len(rows)
        self.rows = tuple(rows)

    def _read_row_from_packet(self, packet):
        """Decode one row using the per-column (encoding, converter) pairs."""
        row = []
        for encoding, converter in self.converters:
            try:
                data = packet.read_length_coded_string()
            except IndexError:
                # No more columns in this row
                # See https://github.com/PyMySQL/PyMySQL/pull/434
                break
            if data is not None:
                if encoding is not None:
                    data = data.decode(encoding)
                if DEBUG:
                    print("DEBUG: DATA = ", data)
                if converter is not None:
                    data = converter(data)
            row.append(data)
        return tuple(row)

    def _get_descriptions(self):
        """Read a column descriptor packet for each column in the result."""
        self.fields = []
        self.converters = []
        use_unicode = self.connection.use_unicode
        conn_encoding = self.connection.encoding
        description = []

        for i in range(self.field_count):
            field = self.connection._read_packet(FieldDescriptorPacket)
            self.fields.append(field)
            description.append(field.description())
            field_type = field.type_code
            if use_unicode:
                if field_type == FIELD_TYPE.JSON:
                    # When SELECT from JSON column: charset = binary
                    # When SELECT CAST(... AS JSON): charset = connection encoding
                    # This behavior is different from TEXT / BLOB.
                    # We should decode result by connection encoding regardless charsetnr.
                    # See https://github.com/PyMySQL/PyMySQL/issues/488
                    encoding = conn_encoding  # SELECT CAST(... AS JSON)
                elif field_type in TEXT_TYPES:
                    if field.charsetnr == 63:  # binary
                        # TEXTs with charset=binary means BINARY types.
                        encoding = None
                    else:
                        encoding = conn_encoding
                else:
                    # Integers, Dates and Times, and other basic data is encoded in ascii
                    encoding = "ascii"
            else:
                encoding = None
            converter = self.connection.decoders.get(field_type)
            if converter is converters.through:
                converter = None
            if DEBUG:
                print(f"DEBUG: field={field}, converter={converter}")
            self.converters.append((encoding, converter))

        # The column-definition list is terminated by an EOF packet.
        eof_packet = self.connection._read_packet()
        assert eof_packet.is_eof_packet(), "Protocol error, expecting EOF"
        self.description = tuple(description)
+
+
class LoadLocalFile:
    """Streams a local file to the server for LOAD DATA LOCAL INFILE."""

    def __init__(self, filename, connection):
        self.filename = filename
        self.connection = connection

    def send_data(self):
        """Send data packets from the local file to the server"""
        conn = self.connection
        if not conn._sock:
            raise err.InterfaceError(0, "")

        try:
            with open(self.filename, "rb") as infile:
                # 16KB is efficient enough
                chunk_size = min(conn.max_allowed_packet, 16 * 1024)
                for chunk in iter(lambda: infile.read(chunk_size), b""):
                    conn.write_packet(chunk)
        except OSError:
            raise err.OperationalError(
                ER.FILE_NOT_FOUND,
                f"Can't find file '{self.filename}'",
            )
        finally:
            if not conn._closed:
                # send the empty packet to signify we are done sending data
                conn.write_packet(b"")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/CLIENT.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/CLIENT.py"
new file mode 100644
index 0000000..34fe57a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/CLIENT.py"
@@ -0,0 +1,38 @@
# https://dev.mysql.com/doc/internals/en/capability-flags.html#packet-Protocol::CapabilityFlags
# Client/server capability flag bits exchanged during the handshake.
# NOTE: bit 14 is intentionally absent here (marked reserved in the
# protocol documentation linked above).
LONG_PASSWORD = 1
FOUND_ROWS = 1 << 1
LONG_FLAG = 1 << 2
CONNECT_WITH_DB = 1 << 3
NO_SCHEMA = 1 << 4
COMPRESS = 1 << 5
ODBC = 1 << 6
LOCAL_FILES = 1 << 7
IGNORE_SPACE = 1 << 8
PROTOCOL_41 = 1 << 9
INTERACTIVE = 1 << 10
SSL = 1 << 11
IGNORE_SIGPIPE = 1 << 12
TRANSACTIONS = 1 << 13
SECURE_CONNECTION = 1 << 15
MULTI_STATEMENTS = 1 << 16
MULTI_RESULTS = 1 << 17
PS_MULTI_RESULTS = 1 << 18
PLUGIN_AUTH = 1 << 19
CONNECT_ATTRS = 1 << 20
PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21
# Default capability set this client advertises to the server.
CAPABILITIES = (
    LONG_PASSWORD
    | LONG_FLAG
    | PROTOCOL_41
    | TRANSACTIONS
    | SECURE_CONNECTION
    | MULTI_RESULTS
    | PLUGIN_AUTH
    | PLUGIN_AUTH_LENENC_CLIENT_DATA
    | CONNECT_ATTRS
)

# Not done yet
HANDLE_EXPIRED_PASSWORDS = 1 << 22
SESSION_TRACK = 1 << 23
DEPRECATE_EOF = 1 << 24
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/COMMAND.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/COMMAND.py"
new file mode 100644
index 0000000..2d98850
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/COMMAND.py"
@@ -0,0 +1,32 @@
# Command codes for the MySQL client/server protocol (COM_* values).
COM_SLEEP = 0x00
COM_QUIT = 0x01
COM_INIT_DB = 0x02
COM_QUERY = 0x03
COM_FIELD_LIST = 0x04
COM_CREATE_DB = 0x05
COM_DROP_DB = 0x06
COM_REFRESH = 0x07
COM_SHUTDOWN = 0x08
COM_STATISTICS = 0x09
COM_PROCESS_INFO = 0x0A
COM_CONNECT = 0x0B
COM_PROCESS_KILL = 0x0C
COM_DEBUG = 0x0D
COM_PING = 0x0E
COM_TIME = 0x0F
COM_DELAYED_INSERT = 0x10
COM_CHANGE_USER = 0x11
COM_BINLOG_DUMP = 0x12
COM_TABLE_DUMP = 0x13
COM_CONNECT_OUT = 0x14
COM_REGISTER_SLAVE = 0x15
# Prepared-statement commands.
COM_STMT_PREPARE = 0x16
COM_STMT_EXECUTE = 0x17
COM_STMT_SEND_LONG_DATA = 0x18
COM_STMT_CLOSE = 0x19
COM_STMT_RESET = 0x1A
COM_SET_OPTION = 0x1B
COM_STMT_FETCH = 0x1C
COM_DAEMON = 0x1D
COM_BINLOG_DUMP_GTID = 0x1E
COM_END = 0x1F
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/CR.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/CR.py"
new file mode 100644
index 0000000..deae977
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/CR.py"
@@ -0,0 +1,79 @@
# flake8: noqa
# errmsg.h
# MySQL client-library error codes (CR_*), mirrored from errmsg.h.
CR_ERROR_FIRST = 2000
CR_UNKNOWN_ERROR = 2000
CR_SOCKET_CREATE_ERROR = 2001
CR_CONNECTION_ERROR = 2002
CR_CONN_HOST_ERROR = 2003
CR_IPSOCK_ERROR = 2004
CR_UNKNOWN_HOST = 2005
CR_SERVER_GONE_ERROR = 2006
CR_VERSION_ERROR = 2007
CR_OUT_OF_MEMORY = 2008
CR_WRONG_HOST_INFO = 2009
CR_LOCALHOST_CONNECTION = 2010
CR_TCP_CONNECTION = 2011
CR_SERVER_HANDSHAKE_ERR = 2012
CR_SERVER_LOST = 2013
CR_COMMANDS_OUT_OF_SYNC = 2014
CR_NAMEDPIPE_CONNECTION = 2015
CR_NAMEDPIPEWAIT_ERROR = 2016
CR_NAMEDPIPEOPEN_ERROR = 2017
CR_NAMEDPIPESETSTATE_ERROR = 2018
CR_CANT_READ_CHARSET = 2019
CR_NET_PACKET_TOO_LARGE = 2020
CR_EMBEDDED_CONNECTION = 2021
CR_PROBE_SLAVE_STATUS = 2022
CR_PROBE_SLAVE_HOSTS = 2023
CR_PROBE_SLAVE_CONNECT = 2024
CR_PROBE_MASTER_CONNECT = 2025
CR_SSL_CONNECTION_ERROR = 2026
CR_MALFORMED_PACKET = 2027
CR_WRONG_LICENSE = 2028

# Prepared-statement related client errors.
CR_NULL_POINTER = 2029
CR_NO_PREPARE_STMT = 2030
CR_PARAMS_NOT_BOUND = 2031
CR_DATA_TRUNCATED = 2032
CR_NO_PARAMETERS_EXISTS = 2033
CR_INVALID_PARAMETER_NO = 2034
CR_INVALID_BUFFER_USE = 2035
CR_UNSUPPORTED_PARAM_TYPE = 2036

CR_SHARED_MEMORY_CONNECTION = 2037
CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
CR_SHARED_MEMORY_MAP_ERROR = 2043
CR_SHARED_MEMORY_EVENT_ERROR = 2044
CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
CR_CONN_UNKNOW_PROTOCOL = 2047
CR_INVALID_CONN_HANDLE = 2048
CR_SECURE_AUTH = 2049
CR_FETCH_CANCELED = 2050
CR_NO_DATA = 2051
CR_NO_STMT_METADATA = 2052
CR_NO_RESULT_SET = 2053
CR_NOT_IMPLEMENTED = 2054
CR_SERVER_LOST_EXTENDED = 2055
CR_STMT_CLOSED = 2056
CR_NEW_STMT_METADATA = 2057
CR_ALREADY_CONNECTED = 2058
CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
CR_DUPLICATE_CONNECTION_ATTR = 2060
CR_AUTH_PLUGIN_ERR = 2061
CR_INSECURE_API_ERR = 2062
CR_FILE_NAME_TOO_LONG = 2063
CR_SSL_FIPS_MODE_ERR = 2064
CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = 2065
CR_COMPRESSION_WRONGLY_CONFIGURED = 2066
CR_KERBEROS_USER_NOT_FOUND = 2067
CR_LOAD_DATA_LOCAL_INFILE_REJECTED = 2068
CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = 2069
CR_DNS_SRV_LOOKUP_FAILED = 2070
CR_MANDATORY_TRACKER_NOT_FOUND = 2071
CR_INVALID_FACTOR_NO = 2072
CR_ERROR_LAST = 2072
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/ER.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/ER.py"
new file mode 100644
index 0000000..98729d1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/ER.py"
@@ -0,0 +1,477 @@
+ERROR_FIRST = 1000
+HASHCHK = 1000
+NISAMCHK = 1001
+NO = 1002
+YES = 1003
+CANT_CREATE_FILE = 1004
+CANT_CREATE_TABLE = 1005
+CANT_CREATE_DB = 1006
+DB_CREATE_EXISTS = 1007
+DB_DROP_EXISTS = 1008
+DB_DROP_DELETE = 1009
+DB_DROP_RMDIR = 1010
+CANT_DELETE_FILE = 1011
+CANT_FIND_SYSTEM_REC = 1012
+CANT_GET_STAT = 1013
+CANT_GET_WD = 1014
+CANT_LOCK = 1015
+CANT_OPEN_FILE = 1016
+FILE_NOT_FOUND = 1017
+CANT_READ_DIR = 1018
+CANT_SET_WD = 1019
+CHECKREAD = 1020
+DISK_FULL = 1021
+DUP_KEY = 1022
+ERROR_ON_CLOSE = 1023
+ERROR_ON_READ = 1024
+ERROR_ON_RENAME = 1025
+ERROR_ON_WRITE = 1026
+FILE_USED = 1027
+FILSORT_ABORT = 1028
+FORM_NOT_FOUND = 1029
+GET_ERRNO = 1030
+ILLEGAL_HA = 1031
+KEY_NOT_FOUND = 1032
+NOT_FORM_FILE = 1033
+NOT_KEYFILE = 1034
+OLD_KEYFILE = 1035
+OPEN_AS_READONLY = 1036
+OUTOFMEMORY = 1037
+OUT_OF_SORTMEMORY = 1038
+UNEXPECTED_EOF = 1039
+CON_COUNT_ERROR = 1040
+OUT_OF_RESOURCES = 1041
+BAD_HOST_ERROR = 1042
+HANDSHAKE_ERROR = 1043
+DBACCESS_DENIED_ERROR = 1044
+ACCESS_DENIED_ERROR = 1045
+NO_DB_ERROR = 1046
+UNKNOWN_COM_ERROR = 1047
+BAD_NULL_ERROR = 1048
+BAD_DB_ERROR = 1049
+TABLE_EXISTS_ERROR = 1050
+BAD_TABLE_ERROR = 1051
+NON_UNIQ_ERROR = 1052
+SERVER_SHUTDOWN = 1053
+BAD_FIELD_ERROR = 1054
+WRONG_FIELD_WITH_GROUP = 1055
+WRONG_GROUP_FIELD = 1056
+WRONG_SUM_SELECT = 1057
+WRONG_VALUE_COUNT = 1058
+TOO_LONG_IDENT = 1059
+DUP_FIELDNAME = 1060
+DUP_KEYNAME = 1061
+DUP_ENTRY = 1062
+WRONG_FIELD_SPEC = 1063
+PARSE_ERROR = 1064
+EMPTY_QUERY = 1065
+NONUNIQ_TABLE = 1066
+INVALID_DEFAULT = 1067
+MULTIPLE_PRI_KEY = 1068
+TOO_MANY_KEYS = 1069
+TOO_MANY_KEY_PARTS = 1070
+TOO_LONG_KEY = 1071
+KEY_COLUMN_DOES_NOT_EXITS = 1072
+BLOB_USED_AS_KEY = 1073
+TOO_BIG_FIELDLENGTH = 1074
+WRONG_AUTO_KEY = 1075
+READY = 1076
+NORMAL_SHUTDOWN = 1077
+GOT_SIGNAL = 1078
+SHUTDOWN_COMPLETE = 1079
+FORCING_CLOSE = 1080
+IPSOCK_ERROR = 1081
+NO_SUCH_INDEX = 1082
+WRONG_FIELD_TERMINATORS = 1083
+BLOBS_AND_NO_TERMINATED = 1084
+TEXTFILE_NOT_READABLE = 1085
+FILE_EXISTS_ERROR = 1086
+LOAD_INFO = 1087
+ALTER_INFO = 1088
+WRONG_SUB_KEY = 1089
+CANT_REMOVE_ALL_FIELDS = 1090
+CANT_DROP_FIELD_OR_KEY = 1091
+INSERT_INFO = 1092
+UPDATE_TABLE_USED = 1093
+NO_SUCH_THREAD = 1094
+KILL_DENIED_ERROR = 1095
+NO_TABLES_USED = 1096
+TOO_BIG_SET = 1097
+NO_UNIQUE_LOGFILE = 1098
+TABLE_NOT_LOCKED_FOR_WRITE = 1099
+TABLE_NOT_LOCKED = 1100
+BLOB_CANT_HAVE_DEFAULT = 1101
+WRONG_DB_NAME = 1102
+WRONG_TABLE_NAME = 1103
+TOO_BIG_SELECT = 1104
+UNKNOWN_ERROR = 1105
+UNKNOWN_PROCEDURE = 1106
+WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
+WRONG_PARAMETERS_TO_PROCEDURE = 1108
+UNKNOWN_TABLE = 1109
+FIELD_SPECIFIED_TWICE = 1110
+INVALID_GROUP_FUNC_USE = 1111
+UNSUPPORTED_EXTENSION = 1112
+TABLE_MUST_HAVE_COLUMNS = 1113
+RECORD_FILE_FULL = 1114
+UNKNOWN_CHARACTER_SET = 1115
+TOO_MANY_TABLES = 1116
+TOO_MANY_FIELDS = 1117
+TOO_BIG_ROWSIZE = 1118
+STACK_OVERRUN = 1119
+WRONG_OUTER_JOIN = 1120
+NULL_COLUMN_IN_INDEX = 1121
+CANT_FIND_UDF = 1122
+CANT_INITIALIZE_UDF = 1123
+UDF_NO_PATHS = 1124
+UDF_EXISTS = 1125
+CANT_OPEN_LIBRARY = 1126
+CANT_FIND_DL_ENTRY = 1127
+FUNCTION_NOT_DEFINED = 1128
+HOST_IS_BLOCKED = 1129
+HOST_NOT_PRIVILEGED = 1130
+PASSWORD_ANONYMOUS_USER = 1131
+PASSWORD_NOT_ALLOWED = 1132
+PASSWORD_NO_MATCH = 1133
+UPDATE_INFO = 1134
+CANT_CREATE_THREAD = 1135
+WRONG_VALUE_COUNT_ON_ROW = 1136
+CANT_REOPEN_TABLE = 1137
+INVALID_USE_OF_NULL = 1138
+REGEXP_ERROR = 1139
+MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
+NONEXISTING_GRANT = 1141
+TABLEACCESS_DENIED_ERROR = 1142
+COLUMNACCESS_DENIED_ERROR = 1143
+ILLEGAL_GRANT_FOR_TABLE = 1144
+GRANT_WRONG_HOST_OR_USER = 1145
+NO_SUCH_TABLE = 1146
+NONEXISTING_TABLE_GRANT = 1147
+NOT_ALLOWED_COMMAND = 1148
+SYNTAX_ERROR = 1149
+DELAYED_CANT_CHANGE_LOCK = 1150
+TOO_MANY_DELAYED_THREADS = 1151
+ABORTING_CONNECTION = 1152
+NET_PACKET_TOO_LARGE = 1153
+NET_READ_ERROR_FROM_PIPE = 1154
+NET_FCNTL_ERROR = 1155
+NET_PACKETS_OUT_OF_ORDER = 1156
+NET_UNCOMPRESS_ERROR = 1157
+NET_READ_ERROR = 1158
+NET_READ_INTERRUPTED = 1159
+NET_ERROR_ON_WRITE = 1160
+NET_WRITE_INTERRUPTED = 1161
+TOO_LONG_STRING = 1162
+TABLE_CANT_HANDLE_BLOB = 1163
+TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
+DELAYED_INSERT_TABLE_LOCKED = 1165
+WRONG_COLUMN_NAME = 1166
+WRONG_KEY_COLUMN = 1167
+WRONG_MRG_TABLE = 1168
+DUP_UNIQUE = 1169
+BLOB_KEY_WITHOUT_LENGTH = 1170
+PRIMARY_CANT_HAVE_NULL = 1171
+TOO_MANY_ROWS = 1172
+REQUIRES_PRIMARY_KEY = 1173
+NO_RAID_COMPILED = 1174
+UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
+KEY_DOES_NOT_EXITS = 1176
+CHECK_NO_SUCH_TABLE = 1177
+CHECK_NOT_IMPLEMENTED = 1178
+CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
+ERROR_DURING_COMMIT = 1180
+ERROR_DURING_ROLLBACK = 1181
+ERROR_DURING_FLUSH_LOGS = 1182
+ERROR_DURING_CHECKPOINT = 1183
+NEW_ABORTING_CONNECTION = 1184
+DUMP_NOT_IMPLEMENTED = 1185
+FLUSH_MASTER_BINLOG_CLOSED = 1186
+INDEX_REBUILD = 1187
+MASTER = 1188
+MASTER_NET_READ = 1189
+MASTER_NET_WRITE = 1190
+FT_MATCHING_KEY_NOT_FOUND = 1191
+LOCK_OR_ACTIVE_TRANSACTION = 1192
+UNKNOWN_SYSTEM_VARIABLE = 1193
+CRASHED_ON_USAGE = 1194
+CRASHED_ON_REPAIR = 1195
+WARNING_NOT_COMPLETE_ROLLBACK = 1196
+TRANS_CACHE_FULL = 1197
+SLAVE_MUST_STOP = 1198
+SLAVE_NOT_RUNNING = 1199
+BAD_SLAVE = 1200
+MASTER_INFO = 1201
+SLAVE_THREAD = 1202
+TOO_MANY_USER_CONNECTIONS = 1203
+SET_CONSTANTS_ONLY = 1204
+LOCK_WAIT_TIMEOUT = 1205
+LOCK_TABLE_FULL = 1206
+READ_ONLY_TRANSACTION = 1207
+DROP_DB_WITH_READ_LOCK = 1208
+CREATE_DB_WITH_READ_LOCK = 1209
+WRONG_ARGUMENTS = 1210
+NO_PERMISSION_TO_CREATE_USER = 1211
+UNION_TABLES_IN_DIFFERENT_DIR = 1212
+LOCK_DEADLOCK = 1213
+TABLE_CANT_HANDLE_FT = 1214
+CANNOT_ADD_FOREIGN = 1215
+NO_REFERENCED_ROW = 1216
+ROW_IS_REFERENCED = 1217
+CONNECT_TO_MASTER = 1218
+QUERY_ON_MASTER = 1219
+ERROR_WHEN_EXECUTING_COMMAND = 1220
+WRONG_USAGE = 1221
+WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
+CANT_UPDATE_WITH_READLOCK = 1223
+MIXING_NOT_ALLOWED = 1224
+DUP_ARGUMENT = 1225
+USER_LIMIT_REACHED = 1226
+SPECIFIC_ACCESS_DENIED_ERROR = 1227
+LOCAL_VARIABLE = 1228
+GLOBAL_VARIABLE = 1229
+NO_DEFAULT = 1230
+WRONG_VALUE_FOR_VAR = 1231
+WRONG_TYPE_FOR_VAR = 1232
+VAR_CANT_BE_READ = 1233
+CANT_USE_OPTION_HERE = 1234
+NOT_SUPPORTED_YET = 1235
+MASTER_FATAL_ERROR_READING_BINLOG = 1236
+SLAVE_IGNORED_TABLE = 1237
+INCORRECT_GLOBAL_LOCAL_VAR = 1238
+WRONG_FK_DEF = 1239
+KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
+OPERAND_COLUMNS = 1241
+SUBQUERY_NO_1_ROW = 1242
+UNKNOWN_STMT_HANDLER = 1243
+CORRUPT_HELP_DB = 1244
+CYCLIC_REFERENCE = 1245
+AUTO_CONVERT = 1246
+ILLEGAL_REFERENCE = 1247
+DERIVED_MUST_HAVE_ALIAS = 1248
+SELECT_REDUCED = 1249
+TABLENAME_NOT_ALLOWED_HERE = 1250
+NOT_SUPPORTED_AUTH_MODE = 1251
+SPATIAL_CANT_HAVE_NULL = 1252
+COLLATION_CHARSET_MISMATCH = 1253
+SLAVE_WAS_RUNNING = 1254
+SLAVE_WAS_NOT_RUNNING = 1255
+TOO_BIG_FOR_UNCOMPRESS = 1256
+ZLIB_Z_MEM_ERROR = 1257
+ZLIB_Z_BUF_ERROR = 1258
+ZLIB_Z_DATA_ERROR = 1259
+CUT_VALUE_GROUP_CONCAT = 1260
+WARN_TOO_FEW_RECORDS = 1261
+WARN_TOO_MANY_RECORDS = 1262
+WARN_NULL_TO_NOTNULL = 1263
+WARN_DATA_OUT_OF_RANGE = 1264
+WARN_DATA_TRUNCATED = 1265
+WARN_USING_OTHER_HANDLER = 1266
+CANT_AGGREGATE_2COLLATIONS = 1267
+DROP_USER = 1268
+REVOKE_GRANTS = 1269
+CANT_AGGREGATE_3COLLATIONS = 1270
+CANT_AGGREGATE_NCOLLATIONS = 1271
+VARIABLE_IS_NOT_STRUCT = 1272
+UNKNOWN_COLLATION = 1273
+SLAVE_IGNORED_SSL_PARAMS = 1274
+SERVER_IS_IN_SECURE_AUTH_MODE = 1275
+WARN_FIELD_RESOLVED = 1276
+BAD_SLAVE_UNTIL_COND = 1277
+MISSING_SKIP_SLAVE = 1278
+UNTIL_COND_IGNORED = 1279
+WRONG_NAME_FOR_INDEX = 1280
+WRONG_NAME_FOR_CATALOG = 1281
+WARN_QC_RESIZE = 1282
+BAD_FT_COLUMN = 1283
+UNKNOWN_KEY_CACHE = 1284
+WARN_HOSTNAME_WONT_WORK = 1285
+UNKNOWN_STORAGE_ENGINE = 1286
+WARN_DEPRECATED_SYNTAX = 1287
+NON_UPDATABLE_TABLE = 1288
+FEATURE_DISABLED = 1289
+OPTION_PREVENTS_STATEMENT = 1290
+DUPLICATED_VALUE_IN_TYPE = 1291
+TRUNCATED_WRONG_VALUE = 1292
+TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
+INVALID_ON_UPDATE = 1294
+UNSUPPORTED_PS = 1295
+GET_ERRMSG = 1296
+GET_TEMPORARY_ERRMSG = 1297
+UNKNOWN_TIME_ZONE = 1298
+WARN_INVALID_TIMESTAMP = 1299
+INVALID_CHARACTER_STRING = 1300
+WARN_ALLOWED_PACKET_OVERFLOWED = 1301
+CONFLICTING_DECLARATIONS = 1302
+SP_NO_RECURSIVE_CREATE = 1303
+SP_ALREADY_EXISTS = 1304
+SP_DOES_NOT_EXIST = 1305
+SP_DROP_FAILED = 1306
+SP_STORE_FAILED = 1307
+SP_LILABEL_MISMATCH = 1308
+SP_LABEL_REDEFINE = 1309
+SP_LABEL_MISMATCH = 1310
+SP_UNINIT_VAR = 1311
+SP_BADSELECT = 1312
+SP_BADRETURN = 1313
+SP_BADSTATEMENT = 1314
+UPDATE_LOG_DEPRECATED_IGNORED = 1315
+UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
+QUERY_INTERRUPTED = 1317
+SP_WRONG_NO_OF_ARGS = 1318
+SP_COND_MISMATCH = 1319
+SP_NORETURN = 1320
+SP_NORETURNEND = 1321
+SP_BAD_CURSOR_QUERY = 1322
+SP_BAD_CURSOR_SELECT = 1323
+SP_CURSOR_MISMATCH = 1324
+SP_CURSOR_ALREADY_OPEN = 1325
+SP_CURSOR_NOT_OPEN = 1326
+SP_UNDECLARED_VAR = 1327
+SP_WRONG_NO_OF_FETCH_ARGS = 1328
+SP_FETCH_NO_DATA = 1329
+SP_DUP_PARAM = 1330
+SP_DUP_VAR = 1331
+SP_DUP_COND = 1332
+SP_DUP_CURS = 1333
+SP_CANT_ALTER = 1334
+SP_SUBSELECT_NYI = 1335
+STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
+SP_VARCOND_AFTER_CURSHNDLR = 1337
+SP_CURSOR_AFTER_HANDLER = 1338
+SP_CASE_NOT_FOUND = 1339
+FPARSER_TOO_BIG_FILE = 1340
+FPARSER_BAD_HEADER = 1341
+FPARSER_EOF_IN_COMMENT = 1342
+FPARSER_ERROR_IN_PARAMETER = 1343
+FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
+VIEW_NO_EXPLAIN = 1345
+FRM_UNKNOWN_TYPE = 1346
+WRONG_OBJECT = 1347
+NONUPDATEABLE_COLUMN = 1348
+VIEW_SELECT_DERIVED = 1349
+VIEW_SELECT_CLAUSE = 1350
+VIEW_SELECT_VARIABLE = 1351
+VIEW_SELECT_TMPTABLE = 1352
+VIEW_WRONG_LIST = 1353
+WARN_VIEW_MERGE = 1354
+WARN_VIEW_WITHOUT_KEY = 1355
+VIEW_INVALID = 1356
+SP_NO_DROP_SP = 1357
+SP_GOTO_IN_HNDLR = 1358
+TRG_ALREADY_EXISTS = 1359
+TRG_DOES_NOT_EXIST = 1360
+TRG_ON_VIEW_OR_TEMP_TABLE = 1361
+TRG_CANT_CHANGE_ROW = 1362
+TRG_NO_SUCH_ROW_IN_TRG = 1363
+NO_DEFAULT_FOR_FIELD = 1364
+DIVISION_BY_ZERO = 1365
+TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
+ILLEGAL_VALUE_FOR_TYPE = 1367
+VIEW_NONUPD_CHECK = 1368
+VIEW_CHECK_FAILED = 1369
+PROCACCESS_DENIED_ERROR = 1370
+RELAY_LOG_FAIL = 1371
+PASSWD_LENGTH = 1372
+UNKNOWN_TARGET_BINLOG = 1373
+IO_ERR_LOG_INDEX_READ = 1374
+BINLOG_PURGE_PROHIBITED = 1375
+FSEEK_FAIL = 1376
+BINLOG_PURGE_FATAL_ERR = 1377
+LOG_IN_USE = 1378
+LOG_PURGE_UNKNOWN_ERR = 1379
+RELAY_LOG_INIT = 1380
+NO_BINARY_LOGGING = 1381
+RESERVED_SYNTAX = 1382
+WSAS_FAILED = 1383
+DIFF_GROUPS_PROC = 1384
+NO_GROUP_FOR_PROC = 1385
+ORDER_WITH_PROC = 1386
+LOGGING_PROHIBIT_CHANGING_OF = 1387
+NO_FILE_MAPPING = 1388
+WRONG_MAGIC = 1389
+PS_MANY_PARAM = 1390
+KEY_PART_0 = 1391
+VIEW_CHECKSUM = 1392
+VIEW_MULTIUPDATE = 1393
+VIEW_NO_INSERT_FIELD_LIST = 1394
+VIEW_DELETE_MERGE_VIEW = 1395
+CANNOT_USER = 1396
+XAER_NOTA = 1397
+XAER_INVAL = 1398
+XAER_RMFAIL = 1399
+XAER_OUTSIDE = 1400
+XAER_RMERR = 1401
+XA_RBROLLBACK = 1402
+NONEXISTING_PROC_GRANT = 1403
+PROC_AUTO_GRANT_FAIL = 1404
+PROC_AUTO_REVOKE_FAIL = 1405
+DATA_TOO_LONG = 1406
+SP_BAD_SQLSTATE = 1407
+STARTUP = 1408
+LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
+CANT_CREATE_USER_WITH_GRANT = 1410
+WRONG_VALUE_FOR_TYPE = 1411
+TABLE_DEF_CHANGED = 1412
+SP_DUP_HANDLER = 1413
+SP_NOT_VAR_ARG = 1414
+SP_NO_RETSET = 1415
+CANT_CREATE_GEOMETRY_OBJECT = 1416
+FAILED_ROUTINE_BREAK_BINLOG = 1417
+BINLOG_UNSAFE_ROUTINE = 1418
+BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
+EXEC_STMT_WITH_OPEN_CURSOR = 1420
+STMT_HAS_NO_OPEN_CURSOR = 1421
+COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
+NO_DEFAULT_FOR_VIEW_FIELD = 1423
+SP_NO_RECURSION = 1424
+TOO_BIG_SCALE = 1425
+TOO_BIG_PRECISION = 1426
+M_BIGGER_THAN_D = 1427
+WRONG_LOCK_OF_SYSTEM_TABLE = 1428
+CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
+QUERY_ON_FOREIGN_DATA_SOURCE = 1430
+FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
+FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
+FOREIGN_DATA_STRING_INVALID = 1433
+CANT_CREATE_FEDERATED_TABLE = 1434
+TRG_IN_WRONG_SCHEMA = 1435
+STACK_OVERRUN_NEED_MORE = 1436
+TOO_LONG_BODY = 1437
+WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
+TOO_BIG_DISPLAYWIDTH = 1439
+XAER_DUPID = 1440
+DATETIME_FUNCTION_OVERFLOW = 1441
+CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
+VIEW_PREVENT_UPDATE = 1443
+PS_NO_RECURSION = 1444
+SP_CANT_SET_AUTOCOMMIT = 1445
+MALFORMED_DEFINER = 1446
+VIEW_FRM_NO_USER = 1447
+VIEW_OTHER_USER = 1448
+NO_SUCH_USER = 1449
+FORBID_SCHEMA_CHANGE = 1450
+ROW_IS_REFERENCED_2 = 1451
+NO_REFERENCED_ROW_2 = 1452
+SP_BAD_VAR_SHADOW = 1453
+TRG_NO_DEFINER = 1454
+OLD_FILE_FORMAT = 1455
+SP_RECURSION_LIMIT = 1456
+SP_PROC_TABLE_CORRUPT = 1457
+SP_WRONG_NAME = 1458
+TABLE_NEEDS_UPGRADE = 1459
+SP_NO_AGGREGATE = 1460
+MAX_PREPARED_STMT_COUNT_REACHED = 1461
+VIEW_RECURSIVE = 1462
+NON_GROUPING_FIELD_USED = 1463
+TABLE_CANT_HANDLE_SPKEYS = 1464
+NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
+USERNAME = 1466
+HOSTNAME = 1467
+WRONG_STRING_LENGTH = 1468
+ERROR_LAST = 1468
+
+# MariaDB only
+STATEMENT_TIMEOUT = 1969
+QUERY_TIMEOUT = 3024
+# https://github.com/PyMySQL/PyMySQL/issues/607
+CONSTRAINT_FAILED = 4025
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/FIELD_TYPE.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/FIELD_TYPE.py"
new file mode 100644
index 0000000..b8b4486
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/FIELD_TYPE.py"
@@ -0,0 +1,31 @@
# Column type codes used in the MySQL protocol's column definitions.
DECIMAL = 0
TINY = 1
SHORT = 2
LONG = 3
FLOAT = 4
DOUBLE = 5
NULL = 6
TIMESTAMP = 7
LONGLONG = 8
INT24 = 9
DATE = 10
TIME = 11
DATETIME = 12
YEAR = 13
NEWDATE = 14
VARCHAR = 15
BIT = 16
JSON = 245
NEWDECIMAL = 246
ENUM = 247
SET = 248
TINY_BLOB = 249
MEDIUM_BLOB = 250
LONG_BLOB = 251
BLOB = 252
VAR_STRING = 253
STRING = 254
GEOMETRY = 255

# Aliases: CHAR shares TINY's code and INTERVAL shares ENUM's.
CHAR = TINY
INTERVAL = ENUM
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/FLAG.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/FLAG.py"
new file mode 100644
index 0000000..f9ebfad
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/FLAG.py"
@@ -0,0 +1,15 @@
# Column definition flag bits (bitmask values combined per column).
NOT_NULL = 1
PRI_KEY = 2
UNIQUE_KEY = 4
MULTIPLE_KEY = 8
BLOB = 16
UNSIGNED = 32
ZEROFILL = 64
BINARY = 128
ENUM = 256
AUTO_INCREMENT = 512
TIMESTAMP = 1024
SET = 2048
PART_KEY = 16384
GROUP = 32767
UNIQUE = 65536
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/SERVER_STATUS.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/SERVER_STATUS.py"
new file mode 100644
index 0000000..8f8d776
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/SERVER_STATUS.py"
@@ -0,0 +1,10 @@
+SERVER_STATUS_IN_TRANS = 1
+SERVER_STATUS_AUTOCOMMIT = 2
+SERVER_MORE_RESULTS_EXISTS = 8
+SERVER_QUERY_NO_GOOD_INDEX_USED = 16
+SERVER_QUERY_NO_INDEX_USED = 32
+SERVER_STATUS_CURSOR_EXISTS = 64
+SERVER_STATUS_LAST_ROW_SENT = 128
+SERVER_STATUS_DB_DROPPED = 256
+SERVER_STATUS_NO_BACKSLASH_ESCAPES = 512
+SERVER_STATUS_METADATA_CHANGED = 1024
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/__init__.py"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/constants/__init__.py"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/converters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/converters.py"
new file mode 100644
index 0000000..dbf97ca
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/converters.py"
@@ -0,0 +1,363 @@
+import datetime
+from decimal import Decimal
+import re
+import time
+
+from .err import ProgrammingError
+from .constants import FIELD_TYPE
+
+
+def escape_item(val, charset, mapping=None):
+ if mapping is None:
+ mapping = encoders
+ encoder = mapping.get(type(val))
+
+ # Fallback to default when no encoder found
+ if not encoder:
+ try:
+ encoder = mapping[str]
+ except KeyError:
+ raise TypeError("no default type converter defined")
+
+ if encoder in (escape_dict, escape_sequence):
+ val = encoder(val, charset, mapping)
+ else:
+ val = encoder(val, mapping)
+ return val
+
+
+def escape_dict(val, charset, mapping=None):
+ raise TypeError("dict can not be used as parameter")
+
+
+def escape_sequence(val, charset, mapping=None):
+ n = []
+ for item in val:
+ quoted = escape_item(item, charset, mapping)
+ n.append(quoted)
+ return "(" + ",".join(n) + ")"
+
+
+def escape_set(val, charset, mapping=None):
+ return ",".join([escape_item(x, charset, mapping) for x in val])
+
+
+def escape_bool(value, mapping=None):
+ return str(int(value))
+
+
+def escape_int(value, mapping=None):
+ return str(value)
+
+
+def escape_float(value, mapping=None):
+ s = repr(value)
+ if s in ("inf", "-inf", "nan"):
+ raise ProgrammingError("%s can not be used with MySQL" % s)
+ if "e" not in s:
+ s += "e0"
+ return s
+
+
+_escape_table = [chr(x) for x in range(128)]
+_escape_table[0] = "\\0"
+_escape_table[ord("\\")] = "\\\\"
+_escape_table[ord("\n")] = "\\n"
+_escape_table[ord("\r")] = "\\r"
+_escape_table[ord("\032")] = "\\Z"
+_escape_table[ord('"')] = '\\"'
+_escape_table[ord("'")] = "\\'"
+
+
+def escape_string(value, mapping=None):
+ """escapes *value* without adding quote.
+
+ Value should be unicode
+ """
+ return value.translate(_escape_table)
+
+
+def escape_bytes_prefixed(value, mapping=None):
+ return "_binary'%s'" % value.decode("ascii", "surrogateescape").translate(
+ _escape_table
+ )
+
+
+def escape_bytes(value, mapping=None):
+ return "'%s'" % value.decode("ascii", "surrogateescape").translate(_escape_table)
+
+
+def escape_str(value, mapping=None):
+ return "'%s'" % escape_string(str(value), mapping)
+
+
+def escape_None(value, mapping=None):
+ return "NULL"
+
+
+def escape_timedelta(obj, mapping=None):
+ seconds = int(obj.seconds) % 60
+ minutes = int(obj.seconds // 60) % 60
+ hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24
+ if obj.microseconds:
+ fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'"
+ else:
+ fmt = "'{0:02d}:{1:02d}:{2:02d}'"
+ return fmt.format(hours, minutes, seconds, obj.microseconds)
+
+
+def escape_time(obj, mapping=None):
+ if obj.microsecond:
+ fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
+ else:
+ fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}'"
+ return fmt.format(obj)
+
+
+def escape_datetime(obj, mapping=None):
+ if obj.microsecond:
+ fmt = (
+ "'{0.year:04}-{0.month:02}-{0.day:02}"
+ + " {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
+ )
+ else:
+ fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'"
+ return fmt.format(obj)
+
+
+def escape_date(obj, mapping=None):
+ fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'"
+ return fmt.format(obj)
+
+
+def escape_struct_time(obj, mapping=None):
+ return escape_datetime(datetime.datetime(*obj[:6]))
+
+
+def Decimal2Literal(o, d):
+ return format(o, "f")
+
+
+def _convert_second_fraction(s):
+ if not s:
+ return 0
+ # Pad zeros to ensure the fraction length in microseconds
+ s = s.ljust(6, "0")
+ return int(s[:6])
+
+
+DATETIME_RE = re.compile(
+ r"(\d{1,4})-(\d{1,2})-(\d{1,2})[T ](\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?"
+)
+
+
+def convert_datetime(obj):
+ """Returns a DATETIME or TIMESTAMP column value as a datetime object:
+
+ >>> convert_datetime('2007-02-25 23:06:20')
+ datetime.datetime(2007, 2, 25, 23, 6, 20)
+ >>> convert_datetime('2007-02-25T23:06:20')
+ datetime.datetime(2007, 2, 25, 23, 6, 20)
+
+ Illegal values are returned as str:
+
+ >>> convert_datetime('2007-02-31T23:06:20')
+ '2007-02-31T23:06:20'
+ >>> convert_datetime('0000-00-00 00:00:00')
+ '0000-00-00 00:00:00'
+ """
+ if isinstance(obj, (bytes, bytearray)):
+ obj = obj.decode("ascii")
+
+ m = DATETIME_RE.match(obj)
+ if not m:
+ return convert_date(obj)
+
+ try:
+ groups = list(m.groups())
+ groups[-1] = _convert_second_fraction(groups[-1])
+ return datetime.datetime(*[int(x) for x in groups])
+ except ValueError:
+ return convert_date(obj)
+
+
+TIMEDELTA_RE = re.compile(r"(-)?(\d{1,3}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
+
+
+def convert_timedelta(obj):
+ """Returns a TIME column as a timedelta object:
+
+ >>> convert_timedelta('25:06:17')
+ datetime.timedelta(days=1, seconds=3977)
+ >>> convert_timedelta('-25:06:17')
+ datetime.timedelta(days=-2, seconds=82423)
+
+ Illegal values are returned as string:
+
+ >>> convert_timedelta('random crap')
+ 'random crap'
+
+ Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
+ can accept values as (+|-)DD HH:MM:SS. The latter format will not
+ be parsed correctly by this function.
+ """
+ if isinstance(obj, (bytes, bytearray)):
+ obj = obj.decode("ascii")
+
+ m = TIMEDELTA_RE.match(obj)
+ if not m:
+ return obj
+
+ try:
+ groups = list(m.groups())
+ groups[-1] = _convert_second_fraction(groups[-1])
+ negate = -1 if groups[0] else 1
+ hours, minutes, seconds, microseconds = groups[1:]
+
+ tdelta = (
+ datetime.timedelta(
+ hours=int(hours),
+ minutes=int(minutes),
+ seconds=int(seconds),
+ microseconds=int(microseconds),
+ )
+ * negate
+ )
+ return tdelta
+ except ValueError:
+ return obj
+
+
+TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
+
+
+def convert_time(obj):
+ """Returns a TIME column as a time object:
+
+ >>> convert_time('15:06:17')
+ datetime.time(15, 6, 17)
+
+ Illegal values are returned as str:
+
+ >>> convert_time('-25:06:17')
+ '-25:06:17'
+ >>> convert_time('random crap')
+ 'random crap'
+
+ Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
+ can accept values as (+|-)DD HH:MM:SS. The latter format will not
+ be parsed correctly by this function.
+
+ Also note that MySQL's TIME column corresponds more closely to
+ Python's timedelta and not time. However if you want TIME columns
+ to be treated as time-of-day and not a time offset, then you can
+    set this function as the converter for FIELD_TYPE.TIME.
+ """
+ if isinstance(obj, (bytes, bytearray)):
+ obj = obj.decode("ascii")
+
+ m = TIME_RE.match(obj)
+ if not m:
+ return obj
+
+ try:
+ groups = list(m.groups())
+ groups[-1] = _convert_second_fraction(groups[-1])
+ hours, minutes, seconds, microseconds = groups
+ return datetime.time(
+ hour=int(hours),
+ minute=int(minutes),
+ second=int(seconds),
+ microsecond=int(microseconds),
+ )
+ except ValueError:
+ return obj
+
+
+def convert_date(obj):
+ """Returns a DATE column as a date object:
+
+ >>> convert_date('2007-02-26')
+ datetime.date(2007, 2, 26)
+
+ Illegal values are returned as str:
+
+ >>> convert_date('2007-02-31')
+ '2007-02-31'
+ >>> convert_date('0000-00-00')
+ '0000-00-00'
+ """
+ if isinstance(obj, (bytes, bytearray)):
+ obj = obj.decode("ascii")
+ try:
+ return datetime.date(*[int(x) for x in obj.split("-", 2)])
+ except ValueError:
+ return obj
+
+
+def through(x):
+ return x
+
+
+# def convert_bit(b):
+# b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
+# return struct.unpack(">Q", b)[0]
+#
+# the snippet above is right, but MySQLdb doesn't process bits,
+# so we shouldn't either
+convert_bit = through
+
+
+encoders = {
+ bool: escape_bool,
+ int: escape_int,
+ float: escape_float,
+ str: escape_str,
+ bytes: escape_bytes,
+ tuple: escape_sequence,
+ list: escape_sequence,
+ set: escape_sequence,
+ frozenset: escape_sequence,
+ dict: escape_dict,
+ type(None): escape_None,
+ datetime.date: escape_date,
+ datetime.datetime: escape_datetime,
+ datetime.timedelta: escape_timedelta,
+ datetime.time: escape_time,
+ time.struct_time: escape_struct_time,
+ Decimal: Decimal2Literal,
+}
+
+
+decoders = {
+ FIELD_TYPE.BIT: convert_bit,
+ FIELD_TYPE.TINY: int,
+ FIELD_TYPE.SHORT: int,
+ FIELD_TYPE.LONG: int,
+ FIELD_TYPE.FLOAT: float,
+ FIELD_TYPE.DOUBLE: float,
+ FIELD_TYPE.LONGLONG: int,
+ FIELD_TYPE.INT24: int,
+ FIELD_TYPE.YEAR: int,
+ FIELD_TYPE.TIMESTAMP: convert_datetime,
+ FIELD_TYPE.DATETIME: convert_datetime,
+ FIELD_TYPE.TIME: convert_timedelta,
+ FIELD_TYPE.DATE: convert_date,
+ FIELD_TYPE.BLOB: through,
+ FIELD_TYPE.TINY_BLOB: through,
+ FIELD_TYPE.MEDIUM_BLOB: through,
+ FIELD_TYPE.LONG_BLOB: through,
+ FIELD_TYPE.STRING: through,
+ FIELD_TYPE.VAR_STRING: through,
+ FIELD_TYPE.VARCHAR: through,
+ FIELD_TYPE.DECIMAL: Decimal,
+ FIELD_TYPE.NEWDECIMAL: Decimal,
+}
+
+
+# for MySQLdb compatibility
+conversions = encoders.copy()
+conversions.update(decoders)
+Thing2Literal = escape_str
+
+# Run doctests with `pytest --doctest-modules pymysql/converters.py`
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/cursors.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/cursors.py"
new file mode 100644
index 0000000..8be05ca
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/cursors.py"
@@ -0,0 +1,531 @@
+import re
+import warnings
+from . import err
+
+
+#: Regular expression for :meth:`Cursor.executemany`.
+#: executemany only supports simple bulk insert.
+#: You can use it to load large dataset.
+RE_INSERT_VALUES = re.compile(
+ r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)"
+ + r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))"
+ + r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
+ re.IGNORECASE | re.DOTALL,
+)
+
+
+class Cursor:
+ """
+ This is the object used to interact with the database.
+
+ Do not create an instance of a Cursor yourself. Call
+ connections.Connection.cursor().
+
+ See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
+ the specification.
+ """
+
+ #: Max statement size which :meth:`executemany` generates.
+ #:
+ #: Max size of allowed statement is max_allowed_packet - packet_header_size.
+ #: Default value of max_allowed_packet is 1048576.
+ max_stmt_length = 1024000
+
+ def __init__(self, connection):
+ self.connection = connection
+ self.warning_count = 0
+ self.description = None
+ self.rownumber = 0
+ self.rowcount = -1
+ self.arraysize = 1
+ self._executed = None
+ self._result = None
+ self._rows = None
+
+ def close(self):
+ """
+ Closing a cursor just exhausts all remaining data.
+ """
+ conn = self.connection
+ if conn is None:
+ return
+ try:
+ while self.nextset():
+ pass
+ finally:
+ self.connection = None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ del exc_info
+ self.close()
+
+ def _get_db(self):
+ if not self.connection:
+ raise err.ProgrammingError("Cursor closed")
+ return self.connection
+
+ def _check_executed(self):
+ if not self._executed:
+ raise err.ProgrammingError("execute() first")
+
+ def _conv_row(self, row):
+ return row
+
+ def setinputsizes(self, *args):
+ """Does nothing, required by DB API."""
+
+ def setoutputsizes(self, *args):
+ """Does nothing, required by DB API."""
+
+ def _nextset(self, unbuffered=False):
+ """Get the next query set."""
+ conn = self._get_db()
+ current_result = self._result
+ if current_result is None or current_result is not conn._result:
+ return None
+ if not current_result.has_next:
+ return None
+ self._result = None
+ self._clear_result()
+ conn.next_result(unbuffered=unbuffered)
+ self._do_get_result()
+ return True
+
+ def nextset(self):
+ return self._nextset(False)
+
+ def _escape_args(self, args, conn):
+ if isinstance(args, (tuple, list)):
+ return tuple(conn.literal(arg) for arg in args)
+ elif isinstance(args, dict):
+ return {key: conn.literal(val) for (key, val) in args.items()}
+ else:
+ # If it's not a dictionary let's try escaping it anyways.
+ # Worst case it will throw a Value error
+ return conn.escape(args)
+
+ def mogrify(self, query, args=None):
+ """
+ Returns the exact string that would be sent to the database by calling the
+ execute() method.
+
+ :param query: Query to mogrify.
+ :type query: str
+
+ :param args: Parameters used with query. (optional)
+ :type args: tuple, list or dict
+
+ :return: The query with argument binding applied.
+ :rtype: str
+
+ This method follows the extension to the DB API 2.0 followed by Psycopg.
+ """
+ conn = self._get_db()
+
+ if args is not None:
+ query = query % self._escape_args(args, conn)
+
+ return query
+
+ def execute(self, query, args=None):
+ """Execute a query.
+
+ :param query: Query to execute.
+ :type query: str
+
+ :param args: Parameters used with query. (optional)
+ :type args: tuple, list or dict
+
+ :return: Number of affected rows.
+ :rtype: int
+
+ If args is a list or tuple, %s can be used as a placeholder in the query.
+ If args is a dict, %(name)s can be used as a placeholder in the query.
+ """
+ while self.nextset():
+ pass
+
+ query = self.mogrify(query, args)
+
+ result = self._query(query)
+ self._executed = query
+ return result
+
+ def executemany(self, query, args):
+ """Run several data against one query.
+
+ :param query: Query to execute.
+ :type query: str
+
+ :param args: Sequence of sequences or mappings. It is used as parameter.
+ :type args: tuple or list
+
+ :return: Number of rows affected, if any.
+ :rtype: int or None
+
+ This method improves performance on multiple-row INSERT and
+ REPLACE. Otherwise it is equivalent to looping over args with
+ execute().
+ """
+ if not args:
+ return
+
+ m = RE_INSERT_VALUES.match(query)
+ if m:
+ q_prefix = m.group(1) % ()
+ q_values = m.group(2).rstrip()
+ q_postfix = m.group(3) or ""
+ assert q_values[0] == "(" and q_values[-1] == ")"
+ return self._do_execute_many(
+ q_prefix,
+ q_values,
+ q_postfix,
+ args,
+ self.max_stmt_length,
+ self._get_db().encoding,
+ )
+
+ self.rowcount = sum(self.execute(query, arg) for arg in args)
+ return self.rowcount
+
+ def _do_execute_many(
+ self, prefix, values, postfix, args, max_stmt_length, encoding
+ ):
+ conn = self._get_db()
+ escape = self._escape_args
+ if isinstance(prefix, str):
+ prefix = prefix.encode(encoding)
+ if isinstance(postfix, str):
+ postfix = postfix.encode(encoding)
+ sql = bytearray(prefix)
+ args = iter(args)
+ v = values % escape(next(args), conn)
+ if isinstance(v, str):
+ v = v.encode(encoding, "surrogateescape")
+ sql += v
+ rows = 0
+ for arg in args:
+ v = values % escape(arg, conn)
+ if isinstance(v, str):
+ v = v.encode(encoding, "surrogateescape")
+ if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
+ rows += self.execute(sql + postfix)
+ sql = bytearray(prefix)
+ else:
+ sql += b","
+ sql += v
+ rows += self.execute(sql + postfix)
+ self.rowcount = rows
+ return rows
+
+ def callproc(self, procname, args=()):
+ """Execute stored procedure procname with args.
+
+ :param procname: Name of procedure to execute on server.
+ :type procname: str
+
+ :param args: Sequence of parameters to use with procedure.
+ :type args: tuple or list
+
+ Returns the original args.
+
+ Compatibility warning: PEP-249 specifies that any modified
+ parameters must be returned. This is currently impossible
+ as they are only available by storing them in a server
+ variable and then retrieved by a query. Since stored
+ procedures return zero or more result sets, there is no
+ reliable way to get at OUT or INOUT parameters via callproc.
+ The server variables are named @_procname_n, where procname
+ is the parameter above and n is the position of the parameter
+ (from zero). Once all result sets generated by the procedure
+ have been fetched, you can issue a SELECT @_procname_0, ...
+ query using .execute() to get any OUT or INOUT values.
+
+ Compatibility warning: The act of calling a stored procedure
+ itself creates an empty result set. This appears after any
+ result sets generated by the procedure. This is non-standard
+ behavior with respect to the DB-API. Be sure to use nextset()
+ to advance through all result sets; otherwise you may get
+ disconnected.
+ """
+ conn = self._get_db()
+ if args:
+ fmt = f"@_{procname}_%d=%s"
+ self._query(
+ "SET %s"
+ % ",".join(
+ fmt % (index, conn.escape(arg)) for index, arg in enumerate(args)
+ )
+ )
+ self.nextset()
+
+ q = "CALL {}({})".format(
+ procname,
+ ",".join(["@_%s_%d" % (procname, i) for i in range(len(args))]),
+ )
+ self._query(q)
+ self._executed = q
+ return args
+
+ def fetchone(self):
+ """Fetch the next row."""
+ self._check_executed()
+ if self._rows is None or self.rownumber >= len(self._rows):
+ return None
+ result = self._rows[self.rownumber]
+ self.rownumber += 1
+ return result
+
+ def fetchmany(self, size=None):
+ """Fetch several rows."""
+ self._check_executed()
+ if self._rows is None:
+ # Django expects () for EOF.
+ # https://github.com/django/django/blob/0c1518ee429b01c145cf5b34eab01b0b92f8c246/django/db/backends/mysql/features.py#L8
+ return ()
+ end = self.rownumber + (size or self.arraysize)
+ result = self._rows[self.rownumber : end]
+ self.rownumber = min(end, len(self._rows))
+ return result
+
+ def fetchall(self):
+ """Fetch all the rows."""
+ self._check_executed()
+ if self._rows is None:
+ return []
+ if self.rownumber:
+ result = self._rows[self.rownumber :]
+ else:
+ result = self._rows
+ self.rownumber = len(self._rows)
+ return result
+
+ def scroll(self, value, mode="relative"):
+ self._check_executed()
+ if mode == "relative":
+ r = self.rownumber + value
+ elif mode == "absolute":
+ r = value
+ else:
+ raise err.ProgrammingError("unknown scroll mode %s" % mode)
+
+ if not (0 <= r < len(self._rows)):
+ raise IndexError("out of range")
+ self.rownumber = r
+
+ def _query(self, q):
+ conn = self._get_db()
+ self._clear_result()
+ conn.query(q)
+ self._do_get_result()
+ return self.rowcount
+
+ def _clear_result(self):
+ self.rownumber = 0
+ self._result = None
+
+ self.rowcount = 0
+ self.warning_count = 0
+ self.description = None
+ self.lastrowid = None
+ self._rows = None
+
+ def _do_get_result(self):
+ conn = self._get_db()
+
+ self._result = result = conn._result
+
+ self.rowcount = result.affected_rows
+ self.warning_count = result.warning_count
+ self.description = result.description
+ self.lastrowid = result.insert_id
+ self._rows = result.rows
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ row = self.fetchone()
+ if row is None:
+ raise StopIteration
+ return row
+
+ def __getattr__(self, name):
+ # DB-API 2.0 optional extension says these errors can be accessed
+ # via Connection object. But MySQLdb had defined them on Cursor object.
+ if name in (
+ "Warning",
+ "Error",
+ "InterfaceError",
+ "DatabaseError",
+ "DataError",
+ "OperationalError",
+ "IntegrityError",
+ "InternalError",
+ "ProgrammingError",
+ "NotSupportedError",
+ ):
+ # Deprecated since v1.1
+ warnings.warn(
+                "PyMySQL errors should be accessed from `pymysql` package",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return getattr(err, name)
+ raise AttributeError(name)
+
+
+class DictCursorMixin:
+ # You can override this to use OrderedDict or other dict-like types.
+ dict_type = dict
+
+ def _do_get_result(self):
+ super()._do_get_result()
+ fields = []
+ if self.description:
+ for f in self._result.fields:
+ name = f.name
+ if name in fields:
+ name = f.table_name + "." + name
+ fields.append(name)
+ self._fields = fields
+
+ if fields and self._rows:
+ self._rows = [self._conv_row(r) for r in self._rows]
+
+ def _conv_row(self, row):
+ if row is None:
+ return None
+ return self.dict_type(zip(self._fields, row))
+
+
+class DictCursor(DictCursorMixin, Cursor):
+ """A cursor which returns results as a dictionary"""
+
+
+class SSCursor(Cursor):
+ """
+ Unbuffered Cursor, mainly useful for queries that return a lot of data,
+ or for connections to remote servers over a slow network.
+
+ Instead of copying every row of data into a buffer, this will fetch
+ rows as needed. The upside of this is the client uses much less memory,
+ and rows are returned much faster when traveling over a slow network
+ or if the result set is very big.
+
+ There are limitations, though. The MySQL protocol doesn't support
+ returning the total number of rows, so the only way to tell how many rows
+ there are is to iterate over every row returned. Also, it currently isn't
+ possible to scroll backwards, as only the current row is held in memory.
+ """
+
+ def _conv_row(self, row):
+ return row
+
+ def close(self):
+ conn = self.connection
+ if conn is None:
+ return
+
+ if self._result is not None and self._result is conn._result:
+ self._result._finish_unbuffered_query()
+
+ try:
+ while self.nextset():
+ pass
+ finally:
+ self.connection = None
+
+ __del__ = close
+
+ def _query(self, q):
+ conn = self._get_db()
+ self._clear_result()
+ conn.query(q, unbuffered=True)
+ self._do_get_result()
+ return self.rowcount
+
+ def nextset(self):
+ return self._nextset(unbuffered=True)
+
+ def read_next(self):
+ """Read next row."""
+ return self._conv_row(self._result._read_rowdata_packet_unbuffered())
+
+ def fetchone(self):
+ """Fetch next row."""
+ self._check_executed()
+ row = self.read_next()
+ if row is None:
+ self.warning_count = self._result.warning_count
+ return None
+ self.rownumber += 1
+ return row
+
+ def fetchall(self):
+ """
+ Fetch all, as per MySQLdb. Pretty useless for large queries, as
+ it is buffered. See fetchall_unbuffered(), if you want an unbuffered
+ generator version of this method.
+ """
+ return list(self.fetchall_unbuffered())
+
+ def fetchall_unbuffered(self):
+ """
+ Fetch all, implemented as a generator, which isn't to standard,
+ however, it doesn't make sense to return everything in a list, as that
+ would use ridiculous memory for large result sets.
+ """
+ return iter(self.fetchone, None)
+
+ def fetchmany(self, size=None):
+ """Fetch many."""
+ self._check_executed()
+ if size is None:
+ size = self.arraysize
+
+ rows = []
+ for i in range(size):
+ row = self.read_next()
+ if row is None:
+ self.warning_count = self._result.warning_count
+ break
+ rows.append(row)
+ self.rownumber += 1
+ if not rows:
+ # Django expects () for EOF.
+ # https://github.com/django/django/blob/0c1518ee429b01c145cf5b34eab01b0b92f8c246/django/db/backends/mysql/features.py#L8
+ return ()
+ return rows
+
+ def scroll(self, value, mode="relative"):
+ self._check_executed()
+
+ if mode == "relative":
+ if value < 0:
+ raise err.NotSupportedError(
+ "Backwards scrolling not supported by this cursor"
+ )
+
+ for _ in range(value):
+ self.read_next()
+ self.rownumber += value
+ elif mode == "absolute":
+ if value < self.rownumber:
+ raise err.NotSupportedError(
+ "Backwards scrolling not supported by this cursor"
+ )
+
+ end = value - self.rownumber
+ for _ in range(end):
+ self.read_next()
+ self.rownumber = value
+ else:
+ raise err.ProgrammingError("unknown scroll mode %s" % mode)
+
+
+class SSDictCursor(DictCursorMixin, SSCursor):
+ """An unbuffered cursor, which returns results as a dictionary"""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/err.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/err.py"
new file mode 100644
index 0000000..dac65d3
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/err.py"
@@ -0,0 +1,150 @@
+import struct
+
+from .constants import ER
+
+
+class MySQLError(Exception):
+ """Exception related to operation with MySQL."""
+
+
+class Warning(Warning, MySQLError):
+ """Exception raised for important warnings like data truncations
+ while inserting, etc."""
+
+
+class Error(MySQLError):
+ """Exception that is the base class of all other error exceptions
+ (not Warning)."""
+
+
+class InterfaceError(Error):
+ """Exception raised for errors that are related to the database
+ interface rather than the database itself."""
+
+
+class DatabaseError(Error):
+ """Exception raised for errors that are related to the
+ database."""
+
+
+class DataError(DatabaseError):
+ """Exception raised for errors that are due to problems with the
+ processed data like division by zero, numeric value out of range,
+ etc."""
+
+
+class OperationalError(DatabaseError):
+ """Exception raised for errors that are related to the database's
+ operation and not necessarily under the control of the programmer,
+ e.g. an unexpected disconnect occurs, the data source name is not
+ found, a transaction could not be processed, a memory allocation
+ error occurred during processing, etc."""
+
+
+class IntegrityError(DatabaseError):
+ """Exception raised when the relational integrity of the database
+ is affected, e.g. a foreign key check fails, duplicate key,
+ etc."""
+
+
+class InternalError(DatabaseError):
+ """Exception raised when the database encounters an internal
+ error, e.g. the cursor is not valid anymore, the transaction is
+ out of sync, etc."""
+
+
+class ProgrammingError(DatabaseError):
+ """Exception raised for programming errors, e.g. table not found
+ or already exists, syntax error in the SQL statement, wrong number
+ of parameters specified, etc."""
+
+
+class NotSupportedError(DatabaseError):
+ """Exception raised in case a method or database API was used
+ which is not supported by the database, e.g. requesting a
+ .rollback() on a connection that does not support transaction or
+ has transactions turned off."""
+
+
+error_map = {}
+
+
+def _map_error(exc, *errors):
+ for error in errors:
+ error_map[error] = exc
+
+
+_map_error(
+ ProgrammingError,
+ ER.DB_CREATE_EXISTS,
+ ER.SYNTAX_ERROR,
+ ER.PARSE_ERROR,
+ ER.NO_SUCH_TABLE,
+ ER.WRONG_DB_NAME,
+ ER.WRONG_TABLE_NAME,
+ ER.FIELD_SPECIFIED_TWICE,
+ ER.INVALID_GROUP_FUNC_USE,
+ ER.UNSUPPORTED_EXTENSION,
+ ER.TABLE_MUST_HAVE_COLUMNS,
+ ER.CANT_DO_THIS_DURING_AN_TRANSACTION,
+ ER.WRONG_DB_NAME,
+ ER.WRONG_COLUMN_NAME,
+)
+_map_error(
+ DataError,
+ ER.WARN_DATA_TRUNCATED,
+ ER.WARN_NULL_TO_NOTNULL,
+ ER.WARN_DATA_OUT_OF_RANGE,
+ ER.NO_DEFAULT,
+ ER.PRIMARY_CANT_HAVE_NULL,
+ ER.DATA_TOO_LONG,
+ ER.DATETIME_FUNCTION_OVERFLOW,
+ ER.TRUNCATED_WRONG_VALUE_FOR_FIELD,
+ ER.ILLEGAL_VALUE_FOR_TYPE,
+)
+_map_error(
+ IntegrityError,
+ ER.DUP_ENTRY,
+ ER.NO_REFERENCED_ROW,
+ ER.NO_REFERENCED_ROW_2,
+ ER.ROW_IS_REFERENCED,
+ ER.ROW_IS_REFERENCED_2,
+ ER.CANNOT_ADD_FOREIGN,
+ ER.BAD_NULL_ERROR,
+)
+_map_error(
+ NotSupportedError,
+ ER.WARNING_NOT_COMPLETE_ROLLBACK,
+ ER.NOT_SUPPORTED_YET,
+ ER.FEATURE_DISABLED,
+ ER.UNKNOWN_STORAGE_ENGINE,
+)
+_map_error(
+ OperationalError,
+ ER.DBACCESS_DENIED_ERROR,
+ ER.ACCESS_DENIED_ERROR,
+ ER.CON_COUNT_ERROR,
+ ER.TABLEACCESS_DENIED_ERROR,
+ ER.COLUMNACCESS_DENIED_ERROR,
+ ER.CONSTRAINT_FAILED,
+ ER.LOCK_DEADLOCK,
+)
+
+
+del _map_error, ER
+
+
+def raise_mysql_exception(data):
+ errno = struct.unpack("<h", data[1:3])[0]
+ # https://dev.mysql.com/doc/dev/mysql-server/latest/page_protocol_basic_err_packet.html
+ # Error packet has optional sqlstate that is 5 bytes and starts with '#'.
+ if data[3] == 0x23: # '#'
+ # sqlstate = data[4:9].decode()
+ # TODO: Append (sqlstate) in the error message. This will be come in next minor release.
+ errval = data[9:].decode("utf-8", "replace")
+ else:
+ errval = data[3:].decode("utf-8", "replace")
+ errorclass = error_map.get(errno)
+ if errorclass is None:
+ errorclass = InternalError if errno < 1000 else OperationalError
+ raise errorclass(errno, errval)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/optionfile.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/optionfile.py"
new file mode 100644
index 0000000..c36f162
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/optionfile.py"
@@ -0,0 +1,21 @@
+import configparser
+
+
+class Parser(configparser.RawConfigParser):
+ def __init__(self, **kwargs):
+ kwargs["allow_no_value"] = True
+ configparser.RawConfigParser.__init__(self, **kwargs)
+
+ def __remove_quotes(self, value):
+ quotes = ["'", '"']
+ for quote in quotes:
+ if len(value) >= 2 and value[0] == value[-1] == quote:
+ return value[1:-1]
+ return value
+
+ def optionxform(self, key):
+ return key.lower().replace("_", "-")
+
+ def get(self, section, option):
+ value = configparser.RawConfigParser.get(self, section, option)
+ return self.__remove_quotes(value)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/protocol.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/protocol.py"
new file mode 100644
index 0000000..98fde6d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/protocol.py"
@@ -0,0 +1,356 @@
+# Python implementation of low level MySQL client-server protocol
+# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
+
+from .charset import MBLENGTH
+from .constants import FIELD_TYPE, SERVER_STATUS
+from . import err
+
+import struct
+import sys
+
+
+DEBUG = False
+
+NULL_COLUMN = 251
+UNSIGNED_CHAR_COLUMN = 251
+UNSIGNED_SHORT_COLUMN = 252
+UNSIGNED_INT24_COLUMN = 253
+UNSIGNED_INT64_COLUMN = 254
+
+
+def dump_packet(data): # pragma: no cover
+ def printable(data):
+ if 32 <= data < 127:
+ return chr(data)
+ return "."
+
+ try:
+ print("packet length:", len(data))
+ for i in range(1, 7):
+ f = sys._getframe(i)
+ print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno))
+ print("-" * 66)
+ except ValueError:
+ pass
+ dump_data = [data[i : i + 16] for i in range(0, min(len(data), 256), 16)]
+ for d in dump_data:
+ print(
+ " ".join(f"{x:02X}" for x in d)
+ + " " * (16 - len(d))
+ + " " * 2
+ + "".join(printable(x) for x in d)
+ )
+ print("-" * 66)
+ print()
+
+
+class MysqlPacket:
+ """Representation of a MySQL response packet.
+
+ Provides an interface for reading/parsing the packet results.
+ """
+
+ __slots__ = ("_position", "_data")
+
+ def __init__(self, data, encoding):
+ self._position = 0
+ self._data = data
+
+ def get_all_data(self):
+ return self._data
+
+ def read(self, size):
+ """Read the first 'size' bytes in packet and advance cursor past them."""
+ result = self._data[self._position : (self._position + size)]
+ if len(result) != size:
+ error = (
+ "Result length not requested length:\n"
+ f"Expected={size}. Actual={len(result)}. Position: {self._position}. Data Length: {len(self._data)}"
+ )
+ if DEBUG:
+ print(error)
+ self.dump()
+ raise AssertionError(error)
+ self._position += size
+ return result
+
+ def read_all(self):
+ """Read all remaining data in the packet.
+
+ (Subsequent read() will return errors.)
+ """
+ result = self._data[self._position :]
+ self._position = None # ensure no subsequent read()
+ return result
+
+ def advance(self, length):
+ """Advance the cursor in data buffer 'length' bytes."""
+ new_position = self._position + length
+ if new_position < 0 or new_position > len(self._data):
+ raise Exception(
+ f"Invalid advance amount ({length}) for cursor. Position={new_position}"
+ )
+ self._position = new_position
+
+ def rewind(self, position=0):
+ """Set the position of the data buffer cursor to 'position'."""
+ if position < 0 or position > len(self._data):
+ raise Exception("Invalid position to rewind cursor to: %s." % position)
+ self._position = position
+
+ def get_bytes(self, position, length=1):
+ """Get 'length' bytes starting at 'position'.
+
+ Position is start of payload (first four packet header bytes are not
+ included) starting at index '0'.
+
+ No error checking is done. If requesting outside end of buffer
+ an empty string (or string shorter than 'length') may be returned!
+ """
+ return self._data[position : (position + length)]
+
+ def read_uint8(self):
+ result = self._data[self._position]
+ self._position += 1
+ return result
+
+ def read_uint16(self):
+ result = struct.unpack_from("<H", self._data, self._position)[0]
+ self._position += 2
+ return result
+
+ def read_uint24(self):
+ low, high = struct.unpack_from("<HB", self._data, self._position)
+ self._position += 3
+ return low + (high << 16)
+
+ def read_uint32(self):
+ result = struct.unpack_from("<I", self._data, self._position)[0]
+ self._position += 4
+ return result
+
+ def read_uint64(self):
+ result = struct.unpack_from("<Q", self._data, self._position)[0]
+ self._position += 8
+ return result
+
+ def read_string(self):
+ end_pos = self._data.find(b"\0", self._position)
+ if end_pos < 0:
+ return None
+ result = self._data[self._position : end_pos]
+ self._position = end_pos + 1
+ return result
+
+ def read_length_encoded_integer(self):
+ """Read a 'Length Coded Binary' number from the data buffer.
+
+ Length coded numbers can be anywhere from 1 to 9 bytes depending
+ on the value of the first byte.
+ """
+ c = self.read_uint8()
+ if c == NULL_COLUMN:
+ return None
+ if c < UNSIGNED_CHAR_COLUMN:
+ return c
+ elif c == UNSIGNED_SHORT_COLUMN:
+ return self.read_uint16()
+ elif c == UNSIGNED_INT24_COLUMN:
+ return self.read_uint24()
+ elif c == UNSIGNED_INT64_COLUMN:
+ return self.read_uint64()
+
+ def read_length_coded_string(self):
+ """Read a 'Length Coded String' from the data buffer.
+
+ A 'Length Coded String' consists first of a length coded
+ (unsigned, positive) integer represented in 1-9 bytes followed by
+ that many bytes of binary data. (For example "cat" would be "3cat".)
+ """
+ length = self.read_length_encoded_integer()
+ if length is None:
+ return None
+ return self.read(length)
+
+ def read_struct(self, fmt):
+ s = struct.Struct(fmt)
+ result = s.unpack_from(self._data, self._position)
+ self._position += s.size
+ return result
+
+ def is_ok_packet(self):
+ # https://dev.mysql.com/doc/internals/en/packet-OK_Packet.html
+ return self._data[0] == 0 and len(self._data) >= 7
+
+ def is_eof_packet(self):
+ # http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
+ # Caution: \xFE may be LengthEncodedInteger.
+ # If \xFE is LengthEncodedInteger header, 8bytes followed.
+ return self._data[0] == 0xFE and len(self._data) < 9
+
+ def is_auth_switch_request(self):
+ # http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
+ return self._data[0] == 0xFE
+
+ def is_extra_auth_data(self):
+ # https://dev.mysql.com/doc/internals/en/successful-authentication.html
+ return self._data[0] == 1
+
+ def is_resultset_packet(self):
+ field_count = self._data[0]
+ return 1 <= field_count <= 250
+
+ def is_load_local_packet(self):
+ return self._data[0] == 0xFB
+
+ def is_error_packet(self):
+ return self._data[0] == 0xFF
+
+ def check_error(self):
+ if self.is_error_packet():
+ self.raise_for_error()
+
+ def raise_for_error(self):
+ self.rewind()
+ self.advance(1) # field_count == error (we already know that)
+ errno = self.read_uint16()
+ if DEBUG:
+ print("errno =", errno)
+ err.raise_mysql_exception(self._data)
+
+ def dump(self):
+ dump_packet(self._data)
+
+
+class FieldDescriptorPacket(MysqlPacket):
+ """A MysqlPacket that represents a specific column's metadata in the result.
+
+ Parsing is automatically done and the results are exported via public
+ attributes on the class such as: db, table_name, name, length, type_code.
+ """
+
+ def __init__(self, data, encoding):
+ MysqlPacket.__init__(self, data, encoding)
+ self._parse_field_descriptor(encoding)
+
+ def _parse_field_descriptor(self, encoding):
+ """Parse the 'Field Descriptor' (Metadata) packet.
+
+ This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
+ """
+ self.catalog = self.read_length_coded_string()
+ self.db = self.read_length_coded_string()
+ self.table_name = self.read_length_coded_string().decode(encoding)
+ self.org_table = self.read_length_coded_string().decode(encoding)
+ self.name = self.read_length_coded_string().decode(encoding)
+ self.org_name = self.read_length_coded_string().decode(encoding)
+ (
+ self.charsetnr,
+ self.length,
+ self.type_code,
+ self.flags,
+ self.scale,
+ ) = self.read_struct("<xHIBHBxx")
+ # 'default' is a length coded binary and is still in the buffer?
+ # not used for normal result sets...
+
+ def description(self):
+ """Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
+ return (
+ self.name,
+ self.type_code,
+ None, # TODO: display_length; should this be self.length?
+ self.get_column_length(), # 'internal_size'
+ self.get_column_length(), # 'precision' # TODO: why!?!?
+ self.scale,
+ self.flags % 2 == 0,
+ )
+
+ def get_column_length(self):
+ if self.type_code == FIELD_TYPE.VAR_STRING:
+ mblen = MBLENGTH.get(self.charsetnr, 1)
+ return self.length // mblen
+ return self.length
+
+ def __str__(self):
+ return "{} {!r}.{!r}.{!r}, type={}, flags={:x}".format(
+ self.__class__,
+ self.db,
+ self.table_name,
+ self.name,
+ self.type_code,
+ self.flags,
+ )
+
+
+class OKPacketWrapper:
+ """
+ OK Packet Wrapper. It uses an existing packet object, and wraps
+ around it, exposing useful variables while still providing access
+ to the original packet objects variables and methods.
+ """
+
+ def __init__(self, from_packet):
+ if not from_packet.is_ok_packet():
+ raise ValueError(
+ "Cannot create "
+ + str(self.__class__.__name__)
+ + " object from invalid packet type"
+ )
+
+ self.packet = from_packet
+ self.packet.advance(1)
+
+ self.affected_rows = self.packet.read_length_encoded_integer()
+ self.insert_id = self.packet.read_length_encoded_integer()
+ self.server_status, self.warning_count = self.read_struct("<HH")
+ self.message = self.packet.read_all()
+ self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
+
+ def __getattr__(self, key):
+ return getattr(self.packet, key)
+
+
+class EOFPacketWrapper:
+ """
+ EOF Packet Wrapper. It uses an existing packet object, and wraps
+ around it, exposing useful variables while still providing access
+ to the original packet objects variables and methods.
+ """
+
+ def __init__(self, from_packet):
+ if not from_packet.is_eof_packet():
+ raise ValueError(
+ f"Cannot create '{self.__class__}' object from invalid packet type"
+ )
+
+ self.packet = from_packet
+ self.warning_count, self.server_status = self.packet.read_struct("<xhh")
+ if DEBUG:
+ print("server_status=", self.server_status)
+ self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
+
+ def __getattr__(self, key):
+ return getattr(self.packet, key)
+
+
+class LoadLocalPacketWrapper:
+ """
+ Load Local Packet Wrapper. It uses an existing packet object, and wraps
+ around it, exposing useful variables while still providing access
+ to the original packet objects variables and methods.
+ """
+
+ def __init__(self, from_packet):
+ if not from_packet.is_load_local_packet():
+ raise ValueError(
+ f"Cannot create '{self.__class__}' object from invalid packet type"
+ )
+
+ self.packet = from_packet
+ self.filename = self.packet.get_all_data()[1:]
+ if DEBUG:
+ print("filename=", self.filename)
+
+ def __getattr__(self, key):
+ return getattr(self.packet, key)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/times.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/times.py"
new file mode 100644
index 0000000..4497dac
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/pymysql/times.py"
@@ -0,0 +1,20 @@
+from time import localtime
+from datetime import date, datetime, time, timedelta
+
+
+Date = date
+Time = time
+TimeDelta = timedelta
+Timestamp = datetime
+
+
+def DateFromTicks(ticks):
+ return date(*localtime(ticks)[:3])
+
+
+def TimeFromTicks(ticks):
+ return time(*localtime(ticks)[3:6])
+
+
+def TimestampFromTicks(ticks):
+ return datetime(*localtime(ticks)[:6])
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/METADATA"
new file mode 100644
index 0000000..b31773e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/METADATA"
@@ -0,0 +1,133 @@
+Metadata-Version: 2.4
+Name: requests
+Version: 2.32.5
+Summary: Python HTTP for Humans.
+Home-page: https://requests.readthedocs.io
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.org
+License: Apache-2.0
+Project-URL: Documentation, https://requests.readthedocs.io
+Project-URL: Source, https://github.com/psf/requests
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: charset_normalizer<4,>=2
+Requires-Dist: idna<4,>=2.5
+Requires-Dist: urllib3<3,>=1.21.1
+Requires-Dist: certifi>=2017.4.17
+Provides-Extra: security
+Provides-Extra: socks
+Requires-Dist: PySocks!=1.5.7,>=1.5.6; extra == "socks"
+Provides-Extra: use-chardet-on-py3
+Requires-Dist: chardet<6,>=3.0.2; extra == "use-chardet-on-py3"
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: project-url
+Dynamic: provides-extra
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+# Requests
+
+**Requests** is a simple, yet elegant, HTTP library.
+
+```python
+>>> import requests
+>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
+>>> r.status_code
+200
+>>> r.headers['content-type']
+'application/json; charset=utf8'
+>>> r.encoding
+'utf-8'
+>>> r.text
+'{"authenticated": true, ...'
+>>> r.json()
+{'authenticated': True, ...}
+```
+
+Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
+
+Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
+
+[](https://pepy.tech/project/requests)
+[](https://pypi.org/project/requests)
+[](https://github.com/psf/requests/graphs/contributors)
+
+## Installing Requests and Supported Versions
+
+Requests is available on PyPI:
+
+```console
+$ python -m pip install requests
+```
+
+Requests officially supports Python 3.9+.
+
+## Supported Features & Best–Practices
+
+Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
+
+- Keep-Alive & Connection Pooling
+- International Domains and URLs
+- Sessions with Cookie Persistence
+- Browser-style TLS/SSL Verification
+- Basic & Digest Authentication
+- Familiar `dict`–like Cookies
+- Automatic Content Decompression and Decoding
+- Multi-part File Uploads
+- SOCKS Proxy Support
+- Connection Timeouts
+- Streaming Downloads
+- Automatic honoring of `.netrc`
+- Chunked HTTP Requests
+
+## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
+
+[](https://requests.readthedocs.io)
+
+## Cloning the repository
+
+When cloning the Requests repository, you may need to add the `-c
+fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit timestamp (see
+[this issue](https://github.com/psf/requests/issues/2690) for more background):
+
+```shell
+git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
+```
+
+You can also apply this setting to your global Git config:
+
+```shell
+git config --global fetch.fsck.badTimezone ignore
+```
+
+---
+
+[](https://kennethreitz.org) [](https://www.python.org/psf)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/RECORD"
new file mode 100644
index 0000000..ee0cf49
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/RECORD"
@@ -0,0 +1,43 @@
+requests-2.32.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+requests-2.32.5.dist-info/METADATA,sha256=ZbWgjagfSRVRPnYJZf8Ut1GPZbe7Pv4NqzZLvMTUDLA,4945
+requests-2.32.5.dist-info/RECORD,,
+requests-2.32.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+requests-2.32.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+requests-2.32.5.dist-info/licenses/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+requests-2.32.5.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
+requests/__init__.py,sha256=4xaAERmPDIBPsa2PsjpU9r06yooK-2mZKHTZAhWRWts,5072
+requests/__pycache__/__init__.cpython-312.pyc,,
+requests/__pycache__/__version__.cpython-312.pyc,,
+requests/__pycache__/_internal_utils.cpython-312.pyc,,
+requests/__pycache__/adapters.cpython-312.pyc,,
+requests/__pycache__/api.cpython-312.pyc,,
+requests/__pycache__/auth.cpython-312.pyc,,
+requests/__pycache__/certs.cpython-312.pyc,,
+requests/__pycache__/compat.cpython-312.pyc,,
+requests/__pycache__/cookies.cpython-312.pyc,,
+requests/__pycache__/exceptions.cpython-312.pyc,,
+requests/__pycache__/help.cpython-312.pyc,,
+requests/__pycache__/hooks.cpython-312.pyc,,
+requests/__pycache__/models.cpython-312.pyc,,
+requests/__pycache__/packages.cpython-312.pyc,,
+requests/__pycache__/sessions.cpython-312.pyc,,
+requests/__pycache__/status_codes.cpython-312.pyc,,
+requests/__pycache__/structures.cpython-312.pyc,,
+requests/__pycache__/utils.cpython-312.pyc,,
+requests/__version__.py,sha256=QKDceK8K_ujqwDDc3oYrR0odOBYgKVOQQ5vFap_G_cg,435
+requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
+requests/adapters.py,sha256=8nX113gbb123aUtx2ETkAN_6IsYX-M2fRoLGluTEcRk,26285
+requests/api.py,sha256=_Zb9Oa7tzVIizTKwFrPjDEY9ejtm_OnSRERnADxGsQs,6449
+requests/auth.py,sha256=kF75tqnLctZ9Mf_hm9TZIj4cQWnN5uxRz8oWsx5wmR0,10186
+requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429
+requests/compat.py,sha256=J7sIjR6XoDGp5JTVzOxkK5fSoUVUa_Pjc7iRZhAWGmI,2142
+requests/cookies.py,sha256=bNi-iqEj4NPZ00-ob-rHvzkvObzN3lEpgw3g6paS3Xw,18590
+requests/exceptions.py,sha256=jJPS1UWATs86ShVUaLorTiJb1SaGuoNEWgICJep-VkY,4260
+requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875
+requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
+requests/models.py,sha256=MjZdZ4k7tnw-1nz5PKShjmPmqyk0L6DciwnFngb_Vk4,35510
+requests/packages.py,sha256=_g0gZ681UyAlKHRjH6kanbaoxx2eAb6qzcXiODyTIoc,904
+requests/sessions.py,sha256=Cl1dpEnOfwrzzPbku-emepNeN4Rt_0_58Iy2x-JGTm8,30503
+requests/status_codes.py,sha256=iJUAeA25baTdw-6PfD0eF4qhpINDJRJI-yaMqxs4LEI,4322
+requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
+requests/utils.py,sha256=WqU86rZ3wvhC-tQjWcjtH_HEKZwWB3iWCZV6SW5DEdQ,33213
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/REQUESTED" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/REQUESTED"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/REQUESTED"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/WHEEL"
new file mode 100644
index 0000000..e7fa31b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..67db858
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/licenses/LICENSE"
@@ -0,0 +1,175 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/top_level.txt"
new file mode 100644
index 0000000..f229360
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests-2.32.5.dist-info/top_level.txt"
@@ -0,0 +1 @@
+requests
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/__init__.py"
new file mode 100644
index 0000000..051cda1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/__init__.py"
@@ -0,0 +1,184 @@
+# __
+# /__) _ _ _ _ _/ _
+# / ( (- (/ (/ (- _) / _)
+# /
+
+"""
+Requests HTTP Library
+~~~~~~~~~~~~~~~~~~~~~
+
+Requests is an HTTP library, written in Python, for human beings.
+Basic GET usage:
+
+ >>> import requests
+ >>> r = requests.get('https://www.python.org')
+ >>> r.status_code
+ 200
+ >>> b'Python is a programming language' in r.content
+ True
+
+... or POST:
+
+ >>> payload = dict(key1='value1', key2='value2')
+ >>> r = requests.post('https://httpbin.org/post', data=payload)
+ >>> print(r.text)
+ {
+ ...
+ "form": {
+ "key1": "value1",
+ "key2": "value2"
+ },
+ ...
+ }
+
+The other HTTP methods are supported - see `requests.api`. Full documentation
+is at <https://requests.readthedocs.io>.
+
+:copyright: (c) 2017 by Kenneth Reitz.
+:license: Apache 2.0, see LICENSE for more details.
+"""
+
+import warnings
+
+import urllib3
+
+from .exceptions import RequestsDependencyWarning
+
+try:
+ from charset_normalizer import __version__ as charset_normalizer_version
+except ImportError:
+ charset_normalizer_version = None
+
+try:
+ from chardet import __version__ as chardet_version
+except ImportError:
+ chardet_version = None
+
+
+def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
+ urllib3_version = urllib3_version.split(".")
+ assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git.
+
+ # Sometimes, urllib3 only reports its version as 16.1.
+ if len(urllib3_version) == 2:
+ urllib3_version.append("0")
+
+ # Check urllib3 for compatibility.
+ major, minor, patch = urllib3_version # noqa: F811
+ major, minor, patch = int(major), int(minor), int(patch)
+ # urllib3 >= 1.21.1
+ assert major >= 1
+ if major == 1:
+ assert minor >= 21
+
+ # Check charset_normalizer for compatibility.
+ if chardet_version:
+ major, minor, patch = chardet_version.split(".")[:3]
+ major, minor, patch = int(major), int(minor), int(patch)
+ # chardet_version >= 3.0.2, < 6.0.0
+ assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
+ elif charset_normalizer_version:
+ major, minor, patch = charset_normalizer_version.split(".")[:3]
+ major, minor, patch = int(major), int(minor), int(patch)
+ # charset_normalizer >= 2.0.0 < 4.0.0
+ assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
+ else:
+ warnings.warn(
+ "Unable to find acceptable character detection dependency "
+ "(chardet or charset_normalizer).",
+ RequestsDependencyWarning,
+ )
+
+
+def _check_cryptography(cryptography_version):
+ # cryptography < 1.3.4
+ try:
+ cryptography_version = list(map(int, cryptography_version.split(".")))
+ except ValueError:
+ return
+
+ if cryptography_version < [1, 3, 4]:
+ warning = "Old version of cryptography ({}) may cause slowdown.".format(
+ cryptography_version
+ )
+ warnings.warn(warning, RequestsDependencyWarning)
+
+
+# Check imported dependencies for compatibility.
+try:
+ check_compatibility(
+ urllib3.__version__, chardet_version, charset_normalizer_version
+ )
+except (AssertionError, ValueError):
+ warnings.warn(
+ "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
+ "version!".format(
+ urllib3.__version__, chardet_version, charset_normalizer_version
+ ),
+ RequestsDependencyWarning,
+ )
+
+# Attempt to enable urllib3's fallback for SNI support
+# if the standard library doesn't support SNI or the
+# 'ssl' library isn't available.
+try:
+ try:
+ import ssl
+ except ImportError:
+ ssl = None
+
+ if not getattr(ssl, "HAS_SNI", False):
+ from urllib3.contrib import pyopenssl
+
+ pyopenssl.inject_into_urllib3()
+
+ # Check cryptography version
+ from cryptography import __version__ as cryptography_version
+
+ _check_cryptography(cryptography_version)
+except ImportError:
+ pass
+
+# urllib3's DependencyWarnings should be silenced.
+from urllib3.exceptions import DependencyWarning
+
+warnings.simplefilter("ignore", DependencyWarning)
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+from logging import NullHandler
+
+from . import packages, utils
+from .__version__ import (
+ __author__,
+ __author_email__,
+ __build__,
+ __cake__,
+ __copyright__,
+ __description__,
+ __license__,
+ __title__,
+ __url__,
+ __version__,
+)
+from .api import delete, get, head, options, patch, post, put, request
+from .exceptions import (
+ ConnectionError,
+ ConnectTimeout,
+ FileModeWarning,
+ HTTPError,
+ JSONDecodeError,
+ ReadTimeout,
+ RequestException,
+ Timeout,
+ TooManyRedirects,
+ URLRequired,
+)
+from .models import PreparedRequest, Request, Response
+from .sessions import Session, session
+from .status_codes import codes
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+# FileModeWarnings go off per the default.
+warnings.simplefilter("default", FileModeWarning, append=True)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/__version__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/__version__.py"
new file mode 100644
index 0000000..effdd98
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/__version__.py"
@@ -0,0 +1,14 @@
+# .-. .-. .-. . . .-. .-. .-. .-.
+# |( |- |.| | | |- `-. | `-.
+# ' ' `-' `-`.`-' `-' `-' ' `-'
+
+__title__ = "requests"
+__description__ = "Python HTTP for Humans."
+__url__ = "https://requests.readthedocs.io"
+__version__ = "2.32.5"
+__build__ = 0x023205
+__author__ = "Kenneth Reitz"
+__author_email__ = "me@kennethreitz.org"
+__license__ = "Apache-2.0"
+__copyright__ = "Copyright Kenneth Reitz"
+__cake__ = "\u2728 \U0001f370 \u2728"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/_internal_utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/_internal_utils.py"
new file mode 100644
index 0000000..f2cf635
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/_internal_utils.py"
@@ -0,0 +1,50 @@
+"""
+requests._internal_utils
+~~~~~~~~~~~~~~
+
+Provides utility functions that are consumed internally by Requests
+which depend on extremely few external helpers (such as compat)
+"""
+import re
+
+from .compat import builtin_str
+
+_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
+_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
+
+_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
+_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
+HEADER_VALIDATORS = {
+ bytes: _HEADER_VALIDATORS_BYTE,
+ str: _HEADER_VALIDATORS_STR,
+}
+
+
+def to_native_string(string, encoding="ascii"):
+ """Given a string object, regardless of type, returns a representation of
+ that string in the native string type, encoding and decoding where
+ necessary. This assumes ASCII unless told otherwise.
+ """
+ if isinstance(string, builtin_str):
+ out = string
+ else:
+ out = string.decode(encoding)
+
+ return out
+
+
+def unicode_is_ascii(u_string):
+ """Determine if unicode string only contains ASCII characters.
+
+ :param str u_string: unicode string to check. Must be unicode
+ and not Python 2 `str`.
+ :rtype: bool
+ """
+ assert isinstance(u_string, str)
+ try:
+ u_string.encode("ascii")
+ return True
+ except UnicodeEncodeError:
+ return False
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/adapters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/adapters.py"
new file mode 100644
index 0000000..670c927
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/adapters.py"
@@ -0,0 +1,696 @@
+"""
+requests.adapters
+~~~~~~~~~~~~~~~~~
+
+This module contains the transport adapters that Requests uses to define
+and maintain connections.
+"""
+
+import os.path
+import socket # noqa: F401
+import typing
+import warnings
+
+from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
+from urllib3.exceptions import HTTPError as _HTTPError
+from urllib3.exceptions import InvalidHeader as _InvalidHeader
+from urllib3.exceptions import (
+ LocationValueError,
+ MaxRetryError,
+ NewConnectionError,
+ ProtocolError,
+)
+from urllib3.exceptions import ProxyError as _ProxyError
+from urllib3.exceptions import ReadTimeoutError, ResponseError
+from urllib3.exceptions import SSLError as _SSLError
+from urllib3.poolmanager import PoolManager, proxy_from_url
+from urllib3.util import Timeout as TimeoutSauce
+from urllib3.util import parse_url
+from urllib3.util.retry import Retry
+
+from .auth import _basic_auth_str
+from .compat import basestring, urlparse
+from .cookies import extract_cookies_to_jar
+from .exceptions import (
+ ConnectionError,
+ ConnectTimeout,
+ InvalidHeader,
+ InvalidProxyURL,
+ InvalidSchema,
+ InvalidURL,
+ ProxyError,
+ ReadTimeout,
+ RetryError,
+ SSLError,
+)
+from .models import Response
+from .structures import CaseInsensitiveDict
+from .utils import (
+ DEFAULT_CA_BUNDLE_PATH,
+ extract_zipped_paths,
+ get_auth_from_url,
+ get_encoding_from_headers,
+ prepend_scheme_if_needed,
+ select_proxy,
+ urldefragauth,
+)
+
+try:
+ from urllib3.contrib.socks import SOCKSProxyManager
+except ImportError:
+
+ def SOCKSProxyManager(*args, **kwargs):
+ raise InvalidSchema("Missing dependencies for SOCKS support.")
+
+
+if typing.TYPE_CHECKING:
+ from .models import PreparedRequest
+
+
+DEFAULT_POOLBLOCK = False
+DEFAULT_POOLSIZE = 10
+DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
+
+
+def _urllib3_request_context(
+ request: "PreparedRequest",
+ verify: "bool | str | None",
+ client_cert: "typing.Tuple[str, str] | str | None",
+ poolmanager: "PoolManager",
+) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
+ host_params = {}
+ pool_kwargs = {}
+ parsed_request_url = urlparse(request.url)
+ scheme = parsed_request_url.scheme.lower()
+ port = parsed_request_url.port
+
+ cert_reqs = "CERT_REQUIRED"
+ if verify is False:
+ cert_reqs = "CERT_NONE"
+ elif isinstance(verify, str):
+ if not os.path.isdir(verify):
+ pool_kwargs["ca_certs"] = verify
+ else:
+ pool_kwargs["ca_cert_dir"] = verify
+ pool_kwargs["cert_reqs"] = cert_reqs
+ if client_cert is not None:
+ if isinstance(client_cert, tuple) and len(client_cert) == 2:
+ pool_kwargs["cert_file"] = client_cert[0]
+ pool_kwargs["key_file"] = client_cert[1]
+ else:
+ # According to our docs, we allow users to specify just the client
+ # cert path
+ pool_kwargs["cert_file"] = client_cert
+ host_params = {
+ "scheme": scheme,
+ "host": parsed_request_url.hostname,
+ "port": port,
+ }
+ return host_params, pool_kwargs
+
+
+class BaseAdapter:
+ """The Base Transport Adapter"""
+
+ def __init__(self):
+ super().__init__()
+
+ def send(
+ self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+ ):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ """
+ raise NotImplementedError
+
+ def close(self):
+ """Cleans up adapter specific items."""
+ raise NotImplementedError
+
+
+class HTTPAdapter(BaseAdapter):
+ """The built-in HTTP Adapter for urllib3.
+
+ Provides a general-case interface for Requests sessions to contact HTTP and
+ HTTPS urls by implementing the Transport Adapter interface. This class will
+ usually be created by the :class:`Session <Session>` class under the
+ covers.
+
+ :param pool_connections: The number of urllib3 connection pools to cache.
+ :param pool_maxsize: The maximum number of connections to save in the pool.
+ :param max_retries: The maximum number of retries each connection
+ should attempt. Note, this applies only to failed DNS lookups, socket
+ connections and connection timeouts, never to requests where data has
+ made it to the server. By default, Requests does not retry failed
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
+ :param pool_block: Whether the connection pool should block for connections.
+
+ Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
+ >>> s.mount('http://', a)
+ """
+
+ __attrs__ = [
+ "max_retries",
+ "config",
+ "_pool_connections",
+ "_pool_maxsize",
+ "_pool_block",
+ ]
+
+ def __init__(
+ self,
+ pool_connections=DEFAULT_POOLSIZE,
+ pool_maxsize=DEFAULT_POOLSIZE,
+ max_retries=DEFAULT_RETRIES,
+ pool_block=DEFAULT_POOLBLOCK,
+ ):
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
+ self.config = {}
+ self.proxy_manager = {}
+
+ super().__init__()
+
+ self._pool_connections = pool_connections
+ self._pool_maxsize = pool_maxsize
+ self._pool_block = pool_block
+
+ self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
+
+ def __getstate__(self):
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
+ def __setstate__(self, state):
+ # Can't handle by adding 'proxy_manager' to self.__attrs__ because
+ # self.poolmanager uses a lambda function, which isn't pickleable.
+ self.proxy_manager = {}
+ self.config = {}
+
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager(
+ self._pool_connections, self._pool_maxsize, block=self._pool_block
+ )
+
+ def init_poolmanager(
+ self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
+ ):
+ """Initializes a urllib3 PoolManager.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param connections: The number of urllib3 connection pools to cache.
+ :param maxsize: The maximum number of connections to save in the pool.
+ :param block: Block when no free connections are available.
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
+ """
+ # save these values for pickling
+ self._pool_connections = connections
+ self._pool_maxsize = maxsize
+ self._pool_block = block
+
+ self.poolmanager = PoolManager(
+ num_pools=connections,
+ maxsize=maxsize,
+ block=block,
+ **pool_kwargs,
+ )
+
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
+ """Return urllib3 ProxyManager for the given proxy.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+ :returns: ProxyManager
+ :rtype: urllib3.ProxyManager
+ """
+ if proxy in self.proxy_manager:
+ manager = self.proxy_manager[proxy]
+ elif proxy.lower().startswith("socks"):
+ username, password = get_auth_from_url(proxy)
+ manager = self.proxy_manager[proxy] = SOCKSProxyManager(
+ proxy,
+ username=username,
+ password=password,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs,
+ )
+ else:
+ proxy_headers = self.proxy_headers(proxy)
+ manager = self.proxy_manager[proxy] = proxy_from_url(
+ proxy,
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs,
+ )
+
+ return manager
+
+ def cert_verify(self, conn, url, verify, cert):
+ """Verify a SSL certificate. This method should not be called from user
+ code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param conn: The urllib3 connection object associated with the cert.
+ :param url: The requested URL.
+ :param verify: Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use
+ :param cert: The SSL certificate to verify.
+ """
+ if url.lower().startswith("https") and verify:
+ cert_loc = None
+
+ # Allow self-specified cert location.
+ if verify is not True:
+ cert_loc = verify
+
+ if not cert_loc:
+ cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
+
+ if not cert_loc or not os.path.exists(cert_loc):
+ raise OSError(
+ f"Could not find a suitable TLS CA certificate bundle, "
+ f"invalid path: {cert_loc}"
+ )
+
+ conn.cert_reqs = "CERT_REQUIRED"
+
+ if not os.path.isdir(cert_loc):
+ conn.ca_certs = cert_loc
+ else:
+ conn.ca_cert_dir = cert_loc
+ else:
+ conn.cert_reqs = "CERT_NONE"
+ conn.ca_certs = None
+ conn.ca_cert_dir = None
+
+ if cert:
+ if not isinstance(cert, basestring):
+ conn.cert_file = cert[0]
+ conn.key_file = cert[1]
+ else:
+ conn.cert_file = cert
+ conn.key_file = None
+ if conn.cert_file and not os.path.exists(conn.cert_file):
+ raise OSError(
+ f"Could not find the TLS certificate file, "
+ f"invalid path: {conn.cert_file}"
+ )
+ if conn.key_file and not os.path.exists(conn.key_file):
+ raise OSError(
+ f"Could not find the TLS key file, invalid path: {conn.key_file}"
+ )
+
+ def build_response(self, req, resp):
+ """Builds a :class:`Response <requests.Response>` object from a urllib3
+ response. This should not be called from user code, and is only exposed
+ for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
+
+ :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
+ :param resp: The urllib3 response object.
+ :rtype: requests.Response
+ """
+ response = Response()
+
+ # Fallback to None if there's no status_code, for whatever reason.
+ response.status_code = getattr(resp, "status", None)
+
+ # Make headers case-insensitive.
+ response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
+
+ # Set encoding.
+ response.encoding = get_encoding_from_headers(response.headers)
+ response.raw = resp
+ response.reason = response.raw.reason
+
+ if isinstance(req.url, bytes):
+ response.url = req.url.decode("utf-8")
+ else:
+ response.url = req.url
+
+ # Add new cookies from the server.
+ extract_cookies_to_jar(response.cookies, req, resp)
+
+ # Give the Response some context.
+ response.request = req
+ response.connection = self
+
+ return response
+
+ def build_connection_pool_key_attributes(self, request, verify, cert=None):
+ """Build the PoolKey attributes used by urllib3 to return a connection.
+
+ This looks at the PreparedRequest, the user-specified verify value,
+ and the value of the cert parameter to determine what PoolKey values
+ to use to select a connection from a given urllib3 Connection Pool.
+
+ The SSL related pool key arguments are not consistently set. As of
+ this writing, use the following to determine what keys may be in that
+ dictionary:
+
+ * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
+ default Requests SSL Context
+ * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
+ ``"cert_reqs"`` will be set
+ * If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
+ ``"ca_certs"`` will be set if the string is not a directory recognized
+ by :py:func:`os.path.isdir`, otherwise ``"ca_cert_dir"`` will be
+ set.
+ * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
+ ``"cert"`` is a tuple with a second item, ``"key_file"`` will also
+ be present
+
+ To override these settings, one may subclass this class, call this
+ method and use the above logic to change parameters as desired. For
+ example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
+ must both set ``"ssl_context"`` and based on what else they require,
+ alter the other keys to ensure the desired behaviour.
+
+ :param request:
+ The PreparedReqest being sent over the connection.
+ :type request:
+ :class:`~requests.models.PreparedRequest`
+ :param verify:
+ Either a boolean, in which case it controls whether
+ we verify the server's TLS certificate, or a string, in which case it
+ must be a path to a CA bundle to use.
+ :param cert:
+ (optional) Any user-provided SSL certificate for client
+ authentication (a.k.a., mTLS). This may be a string (i.e., just
+ the path to a file which holds both certificate and key) or a
+ tuple of length 2 with the certificate file path and key file
+ path.
+ :returns:
+ A tuple of two dictionaries. The first is the "host parameters"
+ portion of the Pool Key including scheme, hostname, and port. The
+ second is a dictionary of SSLContext related parameters.
+ """
+ return _urllib3_request_context(request, verify, cert, self.poolmanager)
+
+ def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
+ """Returns a urllib3 connection for the given request and TLS settings.
+ This should not be called from user code, and is only exposed for use
+ when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request:
+ The :class:`PreparedRequest <PreparedRequest>` object to be sent
+ over the connection.
+ :param verify:
+ Either a boolean, in which case it controls whether we verify the
+ server's TLS certificate, or a string, in which case it must be a
+ path to a CA bundle to use.
+ :param proxies:
+ (optional) The proxies dictionary to apply to the request.
+ :param cert:
+ (optional) Any user-provided SSL certificate to be used for client
+ authentication (a.k.a., mTLS).
+ :rtype:
+ urllib3.ConnectionPool
+ """
+ proxy = select_proxy(request.url, proxies)
+ try:
+ host_params, pool_kwargs = self.build_connection_pool_key_attributes(
+ request,
+ verify,
+ cert,
+ )
+ except ValueError as e:
+ raise InvalidURL(e, request=request)
+ if proxy:
+ proxy = prepend_scheme_if_needed(proxy, "http")
+ proxy_url = parse_url(proxy)
+ if not proxy_url.host:
+ raise InvalidProxyURL(
+ "Please check proxy URL. It is malformed "
+ "and could be missing the host."
+ )
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_host(
+ **host_params, pool_kwargs=pool_kwargs
+ )
+ else:
+ # Only scheme should be lower case
+ conn = self.poolmanager.connection_from_host(
+ **host_params, pool_kwargs=pool_kwargs
+ )
+
+ return conn
+
+ def get_connection(self, url, proxies=None):
+ """DEPRECATED: Users should move to `get_connection_with_tls_context`
+ for all subclasses of HTTPAdapter using Requests>=2.32.2.
+
+ Returns a urllib3 connection for the given URL. This should not be
+ called from user code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param url: The URL to connect to.
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+ :rtype: urllib3.ConnectionPool
+ """
+ warnings.warn(
+ (
+ "`get_connection` has been deprecated in favor of "
+ "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
+ "will need to migrate for Requests>=2.32.2. Please see "
+ "https://github.com/psf/requests/pull/6710 for more details."
+ ),
+ DeprecationWarning,
+ )
+ proxy = select_proxy(url, proxies)
+
+ if proxy:
+ proxy = prepend_scheme_if_needed(proxy, "http")
+ proxy_url = parse_url(proxy)
+ if not proxy_url.host:
+ raise InvalidProxyURL(
+ "Please check proxy URL. It is malformed "
+ "and could be missing the host."
+ )
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_url(url)
+ else:
+ # Only scheme should be lower case
+ parsed = urlparse(url)
+ url = parsed.geturl()
+ conn = self.poolmanager.connection_from_url(url)
+
+ return conn
+
+ def close(self):
+ """Disposes of any internal state.
+
+ Currently, this closes the PoolManager and any active ProxyManager,
+ which closes any pooled connections.
+ """
+ self.poolmanager.clear()
+ for proxy in self.proxy_manager.values():
+ proxy.clear()
+
+ def request_url(self, request, proxies):
+ """Obtain the url to use when making the final request.
+
+ If the message is being sent through a HTTP proxy, the full URL has to
+ be used. Otherwise, we should only use the path portion of the URL.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
+ :rtype: str
+ """
+ proxy = select_proxy(request.url, proxies)
+ scheme = urlparse(request.url).scheme
+
+ is_proxied_http_request = proxy and scheme != "https"
+ using_socks_proxy = False
+ if proxy:
+ proxy_scheme = urlparse(proxy).scheme.lower()
+ using_socks_proxy = proxy_scheme.startswith("socks")
+
+ url = request.path_url
+ if url.startswith("//"): # Don't confuse urllib3
+ url = f"/{url.lstrip('/')}"
+
+ if is_proxied_http_request and not using_socks_proxy:
+ url = urldefragauth(request.url)
+
+ return url
+
+ def add_headers(self, request, **kwargs):
+ """Add any headers needed by the connection. As of v2.0 this does
+ nothing by default, but is left for overriding by users that subclass
+ the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
+ :param kwargs: The keyword arguments from the call to send().
+ """
+ pass
+
+ def proxy_headers(self, proxy):
+ """Returns a dictionary of the headers to add to any request sent
+ through a proxy. This works with urllib3 magic to ensure that they are
+ correctly sent to the proxy, rather than in a tunnelled request if
+ CONNECT is being used.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The url of the proxy being used for this request.
+ :rtype: dict
+ """
+ headers = {}
+ username, password = get_auth_from_url(proxy)
+
+ if username:
+ headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+
+ return headers
+
+ def send(
+ self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+ ):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple or urllib3 Timeout object
+ :param verify: (optional) Either a boolean, in which case it controls whether
+ we verify the server's TLS certificate, or a string, in which case it
+ must be a path to a CA bundle to use
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ :rtype: requests.Response
+ """
+
+ try:
+ conn = self.get_connection_with_tls_context(
+ request, verify, proxies=proxies, cert=cert
+ )
+ except LocationValueError as e:
+ raise InvalidURL(e, request=request)
+
+ self.cert_verify(conn, request.url, verify, cert)
+ url = self.request_url(request, proxies)
+ self.add_headers(
+ request,
+ stream=stream,
+ timeout=timeout,
+ verify=verify,
+ cert=cert,
+ proxies=proxies,
+ )
+
+ chunked = not (request.body is None or "Content-Length" in request.headers)
+
+ if isinstance(timeout, tuple):
+ try:
+ connect, read = timeout
+ timeout = TimeoutSauce(connect=connect, read=read)
+ except ValueError:
+ raise ValueError(
+ f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
+ f"or a single float to set both timeouts to the same value."
+ )
+ elif isinstance(timeout, TimeoutSauce):
+ pass
+ else:
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
+
+ try:
+ resp = conn.urlopen(
+ method=request.method,
+ url=url,
+ body=request.body,
+ headers=request.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=False,
+ retries=self.max_retries,
+ timeout=timeout,
+ chunked=chunked,
+ )
+
+ except (ProtocolError, OSError) as err:
+ raise ConnectionError(err, request=request)
+
+ except MaxRetryError as e:
+ if isinstance(e.reason, ConnectTimeoutError):
+ # TODO: Remove this in 3.0.0: see #2811
+ if not isinstance(e.reason, NewConnectionError):
+ raise ConnectTimeout(e, request=request)
+
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
+ if isinstance(e.reason, _ProxyError):
+ raise ProxyError(e, request=request)
+
+ if isinstance(e.reason, _SSLError):
+ # This branch is for urllib3 v1.22 and later.
+ raise SSLError(e, request=request)
+
+ raise ConnectionError(e, request=request)
+
+ except ClosedPoolError as e:
+ raise ConnectionError(e, request=request)
+
+ except _ProxyError as e:
+ raise ProxyError(e)
+
+ except (_SSLError, _HTTPError) as e:
+ if isinstance(e, _SSLError):
+ # This branch is for urllib3 versions earlier than v1.22
+ raise SSLError(e, request=request)
+ elif isinstance(e, ReadTimeoutError):
+ raise ReadTimeout(e, request=request)
+ elif isinstance(e, _InvalidHeader):
+ raise InvalidHeader(e, request=request)
+ else:
+ raise
+
+ return self.build_response(request, resp)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/api.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/api.py"
new file mode 100644
index 0000000..5960744
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/api.py"
@@ -0,0 +1,157 @@
+"""
+requests.api
+~~~~~~~~~~~~
+
+This module implements the Requests API.
+
+:copyright: (c) 2012 by Kenneth Reitz.
+:license: Apache2, see LICENSE for more details.
+"""
+
+from . import sessions
+
+
+def request(method, url, **kwargs):
+ """Constructs and sends a :class:`Request <Request>`.
+
+ :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
+ :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
+ ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
+ or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string
+ defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
+ to add for the file.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How many seconds to wait for the server to send data
+ before giving up, as a float, or a :ref:`(connect timeout, read
+ timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use. Defaults to ``True``.
+ :param stream: (optional) if ``False``, the response content will be immediately downloaded.
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.request('GET', 'https://httpbin.org/get')
+ >>> req
+ <Response [200]>
+ """
+
+ # By using the 'with' statement we are sure the session is closed, thus we
+ # avoid leaving sockets open which can trigger a ResourceWarning in some
+ # cases, and look like a memory leak in others.
+ with sessions.Session() as session:
+ return session.request(method=method, url=url, **kwargs)
+
+
+def get(url, params=None, **kwargs):
+ r"""Sends a GET request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("get", url, params=params, **kwargs)
+
+
+def options(url, **kwargs):
+ r"""Sends an OPTIONS request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("options", url, **kwargs)
+
+
+def head(url, **kwargs):
+ r"""Sends a HEAD request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes. If
+ `allow_redirects` is not provided, it will be set to `False` (as
+ opposed to the default :meth:`request` behavior).
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault("allow_redirects", False)
+ return request("head", url, **kwargs)
+
+
+def post(url, data=None, json=None, **kwargs):
+ r"""Sends a POST request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("post", url, data=data, json=json, **kwargs)
+
+
+def put(url, data=None, **kwargs):
+ r"""Sends a PUT request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("put", url, data=data, **kwargs)
+
+
+def patch(url, data=None, **kwargs):
+ r"""Sends a PATCH request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("patch", url, data=data, **kwargs)
+
+
+def delete(url, **kwargs):
+ r"""Sends a DELETE request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request("delete", url, **kwargs)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/auth.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/auth.py"
new file mode 100644
index 0000000..4a7ce6d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/auth.py"
@@ -0,0 +1,314 @@
+"""
+requests.auth
+~~~~~~~~~~~~~
+
+This module contains the authentication handlers for Requests.
+"""
+
+import hashlib
+import os
+import re
+import threading
+import time
+import warnings
+from base64 import b64encode
+
+from ._internal_utils import to_native_string
+from .compat import basestring, str, urlparse
+from .cookies import extract_cookies_to_jar
+from .utils import parse_dict_header
+
+CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
+CONTENT_TYPE_MULTI_PART = "multipart/form-data"
+
+
+def _basic_auth_str(username, password):
+ """Returns a Basic Auth string."""
+
+ # "I want us to put a big-ol' comment on top of it that
+ # says that this behaviour is dumb but we need to preserve
+ # it because people are relying on it."
+ # - Lukasa
+ #
+ # These are here solely to maintain backwards compatibility
+ # for things like ints. This will be removed in 3.0.0.
+ if not isinstance(username, basestring):
+ warnings.warn(
+ "Non-string usernames will no longer be supported in Requests "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "a string or bytes object in the near future to avoid "
+ "problems.".format(username),
+ category=DeprecationWarning,
+ )
+ username = str(username)
+
+ if not isinstance(password, basestring):
+ warnings.warn(
+ "Non-string passwords will no longer be supported in Requests "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "a string or bytes object in the near future to avoid "
+ "problems.".format(type(password)),
+ category=DeprecationWarning,
+ )
+ password = str(password)
+ # -- End Removal --
+
+ if isinstance(username, str):
+ username = username.encode("latin1")
+
+ if isinstance(password, str):
+ password = password.encode("latin1")
+
+ authstr = "Basic " + to_native_string(
+ b64encode(b":".join((username, password))).strip()
+ )
+
+ return authstr
+
+
+class AuthBase:
+ """Base class that all auth implementations derive from"""
+
+ def __call__(self, r):
+ raise NotImplementedError("Auth hooks must be callable.")
+
+
+class HTTPBasicAuth(AuthBase):
+ """Attaches HTTP Basic Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
+ def __eq__(self, other):
+ return all(
+ [
+ self.username == getattr(other, "username", None),
+ self.password == getattr(other, "password", None),
+ ]
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __call__(self, r):
+ r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPProxyAuth(HTTPBasicAuth):
+ """Attaches HTTP Proxy Authentication to a given Request object."""
+
+ def __call__(self, r):
+ r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPDigestAuth(AuthBase):
+ """Attaches HTTP Digest Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+ # Keep state in per-thread local storage
+ self._thread_local = threading.local()
+
+ def init_per_thread_state(self):
+ # Ensure state is initialized just once per-thread
+ if not hasattr(self._thread_local, "init"):
+ self._thread_local.init = True
+ self._thread_local.last_nonce = ""
+ self._thread_local.nonce_count = 0
+ self._thread_local.chal = {}
+ self._thread_local.pos = None
+ self._thread_local.num_401_calls = None
+
+ def build_digest_header(self, method, url):
+ """
+ :rtype: str
+ """
+
+ realm = self._thread_local.chal["realm"]
+ nonce = self._thread_local.chal["nonce"]
+ qop = self._thread_local.chal.get("qop")
+ algorithm = self._thread_local.chal.get("algorithm")
+ opaque = self._thread_local.chal.get("opaque")
+ hash_utf8 = None
+
+ if algorithm is None:
+ _algorithm = "MD5"
+ else:
+ _algorithm = algorithm.upper()
+ # lambdas assume digest modules are imported at the top level
+ if _algorithm == "MD5" or _algorithm == "MD5-SESS":
+
+ def md5_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.md5(x).hexdigest()
+
+ hash_utf8 = md5_utf8
+ elif _algorithm == "SHA":
+
+ def sha_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.sha1(x).hexdigest()
+
+ hash_utf8 = sha_utf8
+ elif _algorithm == "SHA-256":
+
+ def sha256_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.sha256(x).hexdigest()
+
+ hash_utf8 = sha256_utf8
+ elif _algorithm == "SHA-512":
+
+ def sha512_utf8(x):
+ if isinstance(x, str):
+ x = x.encode("utf-8")
+ return hashlib.sha512(x).hexdigest()
+
+ hash_utf8 = sha512_utf8
+
+ KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731
+
+ if hash_utf8 is None:
+ return None
+
+ # XXX not implemented yet
+ entdig = None
+ p_parsed = urlparse(url)
+ #: path is request-uri defined in RFC 2616 which should not be empty
+ path = p_parsed.path or "/"
+ if p_parsed.query:
+ path += f"?{p_parsed.query}"
+
+ A1 = f"{self.username}:{realm}:{self.password}"
+ A2 = f"{method}:{path}"
+
+ HA1 = hash_utf8(A1)
+ HA2 = hash_utf8(A2)
+
+ if nonce == self._thread_local.last_nonce:
+ self._thread_local.nonce_count += 1
+ else:
+ self._thread_local.nonce_count = 1
+ ncvalue = f"{self._thread_local.nonce_count:08x}"
+ s = str(self._thread_local.nonce_count).encode("utf-8")
+ s += nonce.encode("utf-8")
+ s += time.ctime().encode("utf-8")
+ s += os.urandom(8)
+
+ cnonce = hashlib.sha1(s).hexdigest()[:16]
+ if _algorithm == "MD5-SESS":
+ HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")
+
+ if not qop:
+ respdig = KD(HA1, f"{nonce}:{HA2}")
+ elif qop == "auth" or "auth" in qop.split(","):
+ noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
+ respdig = KD(HA1, noncebit)
+ else:
+ # XXX handle auth-int.
+ return None
+
+ self._thread_local.last_nonce = nonce
+
+ # XXX should the partial digests be encoded too?
+ base = (
+ f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
+ f'uri="{path}", response="{respdig}"'
+ )
+ if opaque:
+ base += f', opaque="{opaque}"'
+ if algorithm:
+ base += f', algorithm="{algorithm}"'
+ if entdig:
+ base += f', digest="{entdig}"'
+ if qop:
+ base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'
+
+ return f"Digest {base}"
+
+ def handle_redirect(self, r, **kwargs):
+ """Reset num_401_calls counter on redirects."""
+ if r.is_redirect:
+ self._thread_local.num_401_calls = 1
+
+ def handle_401(self, r, **kwargs):
+ """
+ Takes the given response and tries digest-auth, if needed.
+
+ :rtype: requests.Response
+ """
+
+ # If response is not 4xx, do not auth
+ # See https://github.com/psf/requests/issues/3772
+ if not 400 <= r.status_code < 500:
+ self._thread_local.num_401_calls = 1
+ return r
+
+ if self._thread_local.pos is not None:
+ # Rewind the file position indicator of the body to where
+ # it was to resend the request.
+ r.request.body.seek(self._thread_local.pos)
+ s_auth = r.headers.get("www-authenticate", "")
+
+ if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
+ self._thread_local.num_401_calls += 1
+ pat = re.compile(r"digest ", flags=re.IGNORECASE)
+ self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))
+
+ # Consume content and release the original connection
+ # to allow our new request to reuse the same one.
+ r.content
+ r.close()
+ prep = r.request.copy()
+ extract_cookies_to_jar(prep._cookies, r.request, r.raw)
+ prep.prepare_cookies(prep._cookies)
+
+ prep.headers["Authorization"] = self.build_digest_header(
+ prep.method, prep.url
+ )
+ _r = r.connection.send(prep, **kwargs)
+ _r.history.append(r)
+ _r.request = prep
+
+ return _r
+
+ self._thread_local.num_401_calls = 1
+ return r
+
+ def __call__(self, r):
+ # Initialize per-thread state, if needed
+ self.init_per_thread_state()
+ # If we have a saved nonce, skip the 401
+ if self._thread_local.last_nonce:
+ r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
+ try:
+ self._thread_local.pos = r.body.tell()
+ except AttributeError:
+ # In the case of HTTPDigestAuth being reused and the body of
+ # the previous request was a file-like object, pos has the
+ # file position of the previous body. Ensure it's set to
+ # None.
+ self._thread_local.pos = None
+ r.register_hook("response", self.handle_401)
+ r.register_hook("response", self.handle_redirect)
+ self._thread_local.num_401_calls = 1
+
+ return r
+
+ def __eq__(self, other):
+ return all(
+ [
+ self.username == getattr(other, "username", None),
+ self.password == getattr(other, "password", None),
+ ]
+ )
+
+ def __ne__(self, other):
+ return not self == other
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/certs.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/certs.py"
new file mode 100644
index 0000000..be422c3
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/certs.py"
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+"""
+requests.certs
+~~~~~~~~~~~~~~
+
+This module returns the preferred default CA certificate bundle. There is
+only one — the one from the certifi package.
+
+If you are packaging Requests, e.g., for a Linux distribution or a managed
+environment, you can change the definition of where() to return a separately
+packaged CA bundle.
+"""
+from certifi import where
+
+if __name__ == "__main__":
+ print(where())
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/compat.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/compat.py"
new file mode 100644
index 0000000..7f9d754
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/compat.py"
@@ -0,0 +1,106 @@
+"""
+requests.compat
+~~~~~~~~~~~~~~~
+
+This module previously handled import compatibility issues
+between Python 2 and Python 3. It remains for backwards
+compatibility until the next major version.
+"""
+
+import importlib
+import sys
+
+# -------
+# urllib3
+# -------
+from urllib3 import __version__ as urllib3_version
+
+# Detect which major version of urllib3 is being used.
+try:
+ is_urllib3_1 = int(urllib3_version.split(".")[0]) == 1
+except (TypeError, AttributeError):
+ # If we can't discern a version, prefer old functionality.
+ is_urllib3_1 = True
+
+# -------------------
+# Character Detection
+# -------------------
+
+
+def _resolve_char_detection():
+ """Find supported character detection libraries."""
+ chardet = None
+ for lib in ("chardet", "charset_normalizer"):
+ if chardet is None:
+ try:
+ chardet = importlib.import_module(lib)
+ except ImportError:
+ pass
+ return chardet
+
+
+chardet = _resolve_char_detection()
+
+# -------
+# Pythons
+# -------
+
+# Syntax sugar.
+_ver = sys.version_info
+
+#: Python 2.x?
+is_py2 = _ver[0] == 2
+
+#: Python 3.x?
+is_py3 = _ver[0] == 3
+
+# json/simplejson module import resolution
+has_simplejson = False
+try:
+ import simplejson as json
+
+ has_simplejson = True
+except ImportError:
+ import json
+
+if has_simplejson:
+ from simplejson import JSONDecodeError
+else:
+ from json import JSONDecodeError
+
+# Keep OrderedDict for backwards compatibility.
+from collections import OrderedDict
+from collections.abc import Callable, Mapping, MutableMapping
+from http import cookiejar as cookielib
+from http.cookies import Morsel
+from io import StringIO
+
+# --------------
+# Legacy Imports
+# --------------
+from urllib.parse import (
+ quote,
+ quote_plus,
+ unquote,
+ unquote_plus,
+ urldefrag,
+ urlencode,
+ urljoin,
+ urlparse,
+ urlsplit,
+ urlunparse,
+)
+from urllib.request import (
+ getproxies,
+ getproxies_environment,
+ parse_http_list,
+ proxy_bypass,
+ proxy_bypass_environment,
+)
+
+builtin_str = str
+str = str
+bytes = bytes
+basestring = (str, bytes)
+numeric_types = (int, float)
+integer_types = (int,)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/cookies.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/cookies.py"
new file mode 100644
index 0000000..f69d0cd
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/cookies.py"
@@ -0,0 +1,561 @@
+"""
+requests.cookies
+~~~~~~~~~~~~~~~~
+
+Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
+
+requests.utils imports from here, so be careful with imports.
+"""
+
+import calendar
+import copy
+import time
+
+from ._internal_utils import to_native_string
+from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading
+
+
+class MockRequest:
+ """Wraps a `requests.Request` to mimic a `urllib2.Request`.
+
+ The code in `http.cookiejar.CookieJar` expects this interface in order to correctly
+ manage cookie policies, i.e., determine whether a cookie can be set, given the
+ domains of the request and the cookie.
+
+ The original request object is read-only. The client is responsible for collecting
+ the new headers via `get_new_headers()` and interpreting them appropriately. You
+ probably want `get_cookie_header`, defined below.
+ """
+
+ def __init__(self, request):
+ self._r = request
+ self._new_headers = {}
+ self.type = urlparse(self._r.url).scheme
+
+ def get_type(self):
+ return self.type
+
+ def get_host(self):
+ return urlparse(self._r.url).netloc
+
+ def get_origin_req_host(self):
+ return self.get_host()
+
+ def get_full_url(self):
+ # Only return the response's URL if the user hadn't set the Host
+ # header
+ if not self._r.headers.get("Host"):
+ return self._r.url
+ # If they did set it, retrieve it and reconstruct the expected domain
+ host = to_native_string(self._r.headers["Host"], encoding="utf-8")
+ parsed = urlparse(self._r.url)
+ # Reconstruct the URL as we expect it
+ return urlunparse(
+ [
+ parsed.scheme,
+ host,
+ parsed.path,
+ parsed.params,
+ parsed.query,
+ parsed.fragment,
+ ]
+ )
+
+ def is_unverifiable(self):
+ return True
+
+ def has_header(self, name):
+ return name in self._r.headers or name in self._new_headers
+
+ def get_header(self, name, default=None):
+ return self._r.headers.get(name, self._new_headers.get(name, default))
+
+ def add_header(self, key, val):
+ """cookiejar has no legitimate use for this method; add it back if you find one."""
+ raise NotImplementedError(
+ "Cookie headers should be added with add_unredirected_header()"
+ )
+
+ def add_unredirected_header(self, name, value):
+ self._new_headers[name] = value
+
+ def get_new_headers(self):
+ return self._new_headers
+
+ @property
+ def unverifiable(self):
+ return self.is_unverifiable()
+
+ @property
+ def origin_req_host(self):
+ return self.get_origin_req_host()
+
+ @property
+ def host(self):
+ return self.get_host()
+
+
+class MockResponse:
+ """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
+
+ ...what? Basically, expose the parsed HTTP headers from the server response
+ the way `http.cookiejar` expects to see them.
+ """
+
+ def __init__(self, headers):
+ """Make a MockResponse for `cookiejar` to read.
+
+ :param headers: a httplib.HTTPMessage or analogous carrying the headers
+ """
+ self._headers = headers
+
+ def info(self):
+ return self._headers
+
+ def getheaders(self, name):
+ self._headers.getheaders(name)
+
+
+def extract_cookies_to_jar(jar, request, response):
+ """Extract the cookies from the response into a CookieJar.
+
+ :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
+ :param request: our own requests.Request object
+ :param response: urllib3.HTTPResponse object
+ """
+ if not (hasattr(response, "_original_response") and response._original_response):
+ return
+ # the _original_response field is the wrapped httplib.HTTPResponse object,
+ req = MockRequest(request)
+ # pull out the HTTPMessage with the headers and put it in the mock:
+ res = MockResponse(response._original_response.msg)
+ jar.extract_cookies(res, req)
+
+
+def get_cookie_header(jar, request):
+ """
+ Produce an appropriate Cookie header string to be sent with `request`, or None.
+
+ :rtype: str
+ """
+ r = MockRequest(request)
+ jar.add_cookie_header(r)
+ return r.get_new_headers().get("Cookie")
+
+
+def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
+ """Unsets a cookie by name, by default over all domains and paths.
+
+ Wraps CookieJar.clear(), is O(n).
+ """
+ clearables = []
+ for cookie in cookiejar:
+ if cookie.name != name:
+ continue
+ if domain is not None and domain != cookie.domain:
+ continue
+ if path is not None and path != cookie.path:
+ continue
+ clearables.append((cookie.domain, cookie.path, cookie.name))
+
+ for domain, path, name in clearables:
+ cookiejar.clear(domain, path, name)
+
+
+class CookieConflictError(RuntimeError):
+ """There are two cookies that meet the criteria specified in the cookie jar.
+ Use .get and .set and include domain and path args in order to be more specific.
+ """
+
+
+class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
+ """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
+ interface.
+
+ This is the CookieJar we create by default for requests and sessions that
+ don't specify one, since some clients may expect response.cookies and
+ session.cookies to support dict operations.
+
+ Requests does not use the dict interface internally; it's just for
+ compatibility with external client code. All requests code should work
+ out of the box with externally provided instances of ``CookieJar``, e.g.
+ ``LWPCookieJar`` and ``FileCookieJar``.
+
+ Unlike a regular CookieJar, this class is pickleable.
+
+ .. warning:: dictionary operations that are normally O(1) may be O(n).
+ """
+
+ def get(self, name, default=None, domain=None, path=None):
+ """Dict-like get() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+
+ .. warning:: operation is O(n), not O(1).
+ """
+ try:
+ return self._find_no_duplicates(name, domain, path)
+ except KeyError:
+ return default
+
+ def set(self, name, value, **kwargs):
+ """Dict-like set() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+ """
+ # support client code that unsets cookies by assignment of a None value:
+ if value is None:
+ remove_cookie_by_name(
+ self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
+ )
+ return
+
+ if isinstance(value, Morsel):
+ c = morsel_to_cookie(value)
+ else:
+ c = create_cookie(name, value, **kwargs)
+ self.set_cookie(c)
+ return c
+
+ def iterkeys(self):
+ """Dict-like iterkeys() that returns an iterator of names of cookies
+ from the jar.
+
+ .. seealso:: itervalues() and iteritems().
+ """
+ for cookie in iter(self):
+ yield cookie.name
+
+ def keys(self):
+ """Dict-like keys() that returns a list of names of cookies from the
+ jar.
+
+ .. seealso:: values() and items().
+ """
+ return list(self.iterkeys())
+
+ def itervalues(self):
+ """Dict-like itervalues() that returns an iterator of values of cookies
+ from the jar.
+
+ .. seealso:: iterkeys() and iteritems().
+ """
+ for cookie in iter(self):
+ yield cookie.value
+
+ def values(self):
+ """Dict-like values() that returns a list of values of cookies from the
+ jar.
+
+ .. seealso:: keys() and items().
+ """
+ return list(self.itervalues())
+
+ def iteritems(self):
+ """Dict-like iteritems() that returns an iterator of name-value tuples
+ from the jar.
+
+ .. seealso:: iterkeys() and itervalues().
+ """
+ for cookie in iter(self):
+ yield cookie.name, cookie.value
+
+ def items(self):
+ """Dict-like items() that returns a list of name-value tuples from the
+ jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
+ vanilla python dict of key value pairs.
+
+ .. seealso:: keys() and values().
+ """
+ return list(self.iteritems())
+
+ def list_domains(self):
+ """Utility method to list all the domains in the jar."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain not in domains:
+ domains.append(cookie.domain)
+ return domains
+
+ def list_paths(self):
+ """Utility method to list all the paths in the jar."""
+ paths = []
+ for cookie in iter(self):
+ if cookie.path not in paths:
+ paths.append(cookie.path)
+ return paths
+
+ def multiple_domains(self):
+ """Returns True if there are multiple domains in the jar.
+ Returns False otherwise.
+
+ :rtype: bool
+ """
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain is not None and cookie.domain in domains:
+ return True
+ domains.append(cookie.domain)
+ return False # there is only one domain in jar
+
+ def get_dict(self, domain=None, path=None):
+ """Takes as an argument an optional domain and path and returns a plain
+ old Python dict of name-value pairs of cookies that meet the
+ requirements.
+
+ :rtype: dict
+ """
+ dictionary = {}
+ for cookie in iter(self):
+ if (domain is None or cookie.domain == domain) and (
+ path is None or cookie.path == path
+ ):
+ dictionary[cookie.name] = cookie.value
+ return dictionary
+
+ def __contains__(self, name):
+ try:
+ return super().__contains__(name)
+ except CookieConflictError:
+ return True
+
+ def __getitem__(self, name):
+ """Dict-like __getitem__() for compatibility with client code. Throws
+ exception if there are more than one cookie with name. In that case,
+ use the more explicit get() method instead.
+
+ .. warning:: operation is O(n), not O(1).
+ """
+ return self._find_no_duplicates(name)
+
+ def __setitem__(self, name, value):
+ """Dict-like __setitem__ for compatibility with client code. Throws
+ exception if there is already a cookie of that name in the jar. In that
+ case, use the more explicit set() method instead.
+ """
+ self.set(name, value)
+
+ def __delitem__(self, name):
+ """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
+ ``remove_cookie_by_name()``.
+ """
+ remove_cookie_by_name(self, name)
+
+ def set_cookie(self, cookie, *args, **kwargs):
+ if (
+ hasattr(cookie.value, "startswith")
+ and cookie.value.startswith('"')
+ and cookie.value.endswith('"')
+ ):
+ cookie.value = cookie.value.replace('\\"', "")
+ return super().set_cookie(cookie, *args, **kwargs)
+
+ def update(self, other):
+ """Updates this jar with cookies from another CookieJar or dict-like"""
+ if isinstance(other, cookielib.CookieJar):
+ for cookie in other:
+ self.set_cookie(copy.copy(cookie))
+ else:
+ super().update(other)
+
+ def _find(self, name, domain=None, path=None):
+ """Requests uses this method internally to get cookie values.
+
+ If there are conflicting cookies, _find arbitrarily chooses one.
+ See _find_no_duplicates if you want an exception thrown if there are
+ conflicting cookies.
+
+ :param name: a string containing name of cookie
+ :param domain: (optional) string containing domain of cookie
+ :param path: (optional) string containing path of cookie
+ :return: cookie.value
+ """
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ return cookie.value
+
+ raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
+
+ def _find_no_duplicates(self, name, domain=None, path=None):
+ """Both ``__get_item__`` and ``get`` call this function: it's never
+ used elsewhere in Requests.
+
+ :param name: a string containing name of cookie
+ :param domain: (optional) string containing domain of cookie
+ :param path: (optional) string containing path of cookie
+ :raises KeyError: if cookie is not found
+ :raises CookieConflictError: if there are multiple cookies
+ that match name and optionally domain and path
+ :return: cookie.value
+ """
+ toReturn = None
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ if toReturn is not None:
+ # if there are multiple cookies that meet passed in criteria
+ raise CookieConflictError(
+ f"There are multiple cookies with name, {name!r}"
+ )
+ # we will eventually return this as long as no cookie conflict
+ toReturn = cookie.value
+
+ if toReturn:
+ return toReturn
+ raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
+
+ def __getstate__(self):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ state = self.__dict__.copy()
+ # remove the unpickleable RLock object
+ state.pop("_cookies_lock")
+ return state
+
+ def __setstate__(self, state):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ self.__dict__.update(state)
+ if "_cookies_lock" not in self.__dict__:
+ self._cookies_lock = threading.RLock()
+
+ def copy(self):
+ """Return a copy of this RequestsCookieJar."""
+ new_cj = RequestsCookieJar()
+ new_cj.set_policy(self.get_policy())
+ new_cj.update(self)
+ return new_cj
+
    def get_policy(self):
        """Return the CookiePolicy instance used."""
        # ``_policy`` is the attribute the underlying http.cookiejar.CookieJar
        # keeps its policy in; this is just a public accessor for it.
        return self._policy
+
+
def _copy_cookie_jar(jar):
    """Duplicate *jar*, preserving its concrete cookie-jar type.

    ``None`` input yields ``None``. Jars exposing a ``copy`` method (i.e.
    RequestsCookieJar) are cloned through it; plain cookielib jars are
    shallow-copied and then refilled cookie by cookie.
    """
    if jar is None:
        return None

    if hasattr(jar, "copy"):
        # We're dealing with an instance of RequestsCookieJar
        return jar.copy()

    # Generic CookieJar: clone the container, then re-add a copy of each cookie.
    duplicate = copy.copy(jar)
    duplicate.clear()
    for original in jar:
        duplicate.set_cookie(copy.copy(original))
    return duplicate
+
+
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    spec = {
        "version": 0,
        "name": name,
        "value": value,
        "port": None,
        "domain": "",
        "path": "/",
        "secure": False,
        "expires": None,
        "discard": True,
        "comment": None,
        "comment_url": None,
        "rest": {"HttpOnly": None},
        "rfc2109": False,
    }

    # Reject keywords that http.cookiejar.Cookie would not understand.
    badargs = set(kwargs) - set(spec)
    if badargs:
        raise TypeError(
            f"create_cookie() got unexpected keyword arguments: {list(badargs)}"
        )

    spec.update(kwargs)
    # Derive the "specified" bookkeeping flags cookielib.Cookie expects.
    spec["port_specified"] = bool(spec["port"])
    spec["domain_specified"] = bool(spec["domain"])
    spec["domain_initial_dot"] = spec["domain"].startswith(".")
    spec["path_specified"] = bool(spec["path"])

    return cookielib.Cookie(**spec)
+
+
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    expires = None
    if morsel["max-age"]:
        # Max-Age takes precedence; it is a relative offset in seconds.
        try:
            expires = int(time.time() + int(morsel["max-age"]))
        except ValueError:
            raise TypeError(f"max-age: {morsel['max-age']} must be integer")
    elif morsel["expires"]:
        # Expires is an absolute HTTP-date; convert to a unix timestamp.
        time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
        expires = calendar.timegm(time.strptime(morsel["expires"], time_template))

    attrs = {
        "comment": morsel["comment"],
        "comment_url": bool(morsel["comment"]),
        "discard": False,
        "domain": morsel["domain"],
        "expires": expires,
        "name": morsel.key,
        "path": morsel["path"],
        "port": None,
        "rest": {"HttpOnly": morsel["httponly"]},
        "rfc2109": False,
        "secure": bool(morsel["secure"]),
        "value": morsel.value,
        "version": morsel["version"] or 0,
    }
    return create_cookie(**attrs)
+
+
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    :rtype: CookieJar
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is not None:
        # Snapshot existing names once so the membership test is O(1) per key.
        existing = {cookie.name for cookie in cookiejar}
        for name in cookie_dict:
            if overwrite or name not in existing:
                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))

    return cookiejar
+
+
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError("You can only merge into CookieJar")

    if isinstance(cookies, dict):
        # Dict input goes through cookiejar_from_dict without clobbering
        # cookies that are already present in the jar.
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJar has no update(); transfer cookie objects one by one.
            for incoming in cookies:
                cookiejar.set_cookie(incoming)

    return cookiejar
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/exceptions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/exceptions.py"
new file mode 100644
index 0000000..83986b4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/exceptions.py"
@@ -0,0 +1,151 @@
+"""
+requests.exceptions
+~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of Requests' exceptions.
+"""
+from urllib3.exceptions import HTTPError as BaseHTTPError
+
+from .compat import JSONDecodeError as CompatJSONDecodeError
+
+
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        self.response = kwargs.pop("response", None)
        self.request = kwargs.pop("request", None)
        # When a response is supplied but no request, borrow the request that
        # produced the response so both ends of the exchange are exposed.
        if (
            self.response is not None
            and not self.request
            and hasattr(self.response, "request")
        ):
            self.request = self.response.request
        super().__init__(*args, **kwargs)
+
+
class InvalidJSONError(RequestException):
    """A JSON error occurred (e.g. a body could not be JSON-serialized)."""
+
+
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
    """Couldn't decode the text into json"""

    def __init__(self, *args, **kwargs):
        """
        Construct the JSONDecodeError instance first with all
        args. Then use it's args to construct the IOError so that
        the json specific args aren't used as IOError specific args
        and the error message from JSONDecodeError is preserved.
        """
        # Order matters: CompatJSONDecodeError sets self.args from the
        # json-specific arguments, and those args are then fed to the
        # IOError branch of the hierarchy.
        CompatJSONDecodeError.__init__(self, *args)
        InvalidJSONError.__init__(self, *self.args, **kwargs)

    def __reduce__(self):
        """
        The __reduce__ method called when pickling the object must
        be the one from the JSONDecodeError (be it json/simplejson)
        as it expects all the arguments for instantiation, not just
        one like the IOError, and the MRO would by default call the
        __reduce__ method from the IOError due to the inheritance order.
        """
        return CompatJSONDecodeError.__reduce__(self)
+
+
class HTTPError(RequestException):
    """An HTTP error occurred."""


class ConnectionError(RequestException):
    """A Connection error occurred."""


class ProxyError(ConnectionError):
    """A proxy error occurred."""


class SSLError(ConnectionError):
    """An SSL error occurred."""


class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """


class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """


class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""


class URLRequired(RequestException):
    """A valid URL is required to make a request."""


class TooManyRedirects(RequestException):
    """Too many redirects (the redirect limit was exceeded)."""
+
+
class MissingSchema(RequestException, ValueError):
    """The URL scheme (e.g. http or https) is missing.

    Also a :exc:`ValueError`, so it can be caught as invalid input.
    """


class InvalidSchema(RequestException, ValueError):
    """The URL scheme provided is either invalid or unsupported."""


class InvalidURL(RequestException, ValueError):
    """The URL provided was somehow invalid."""


class InvalidHeader(RequestException, ValueError):
    """The header value provided was somehow invalid."""


class InvalidProxyURL(InvalidURL):
    """The proxy URL provided is invalid."""


class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but sent an invalid chunk."""


class ContentDecodingError(RequestException, BaseHTTPError):
    """Failed to decode response content.

    Also inherits urllib3's HTTPError so lower-level handlers catch it too.
    """


class StreamConsumedError(RequestException, TypeError):
    """The content for this response was already consumed."""


class RetryError(RequestException):
    """Custom retries logic failed"""


class UnrewindableBodyError(RequestException):
    """Requests encountered an error when trying to rewind a body."""
+
+
# Warnings emitted by Requests (non-fatal diagnostics)


class RequestsWarning(Warning):
    """Base warning for Requests."""


class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but Requests determined its binary length."""


class RequestsDependencyWarning(RequestsWarning):
    """An imported dependency doesn't match the expected version range."""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/help.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/help.py"
new file mode 100644
index 0000000..8fbcd65
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/help.py"
@@ -0,0 +1,134 @@
+"""Module containing bug report helper(s)."""
+
+import json
+import platform
+import ssl
+import sys
+
+import idna
+import urllib3
+
+from . import __version__ as requests_version
+
+try:
+ import charset_normalizer
+except ImportError:
+ charset_normalizer = None
+
+try:
+ import chardet
+except ImportError:
+ chardet = None
+
+try:
+ from urllib3.contrib import pyopenssl
+except ImportError:
+ pyopenssl = None
+ OpenSSL = None
+ cryptography = None
+else:
+ import cryptography
+ import OpenSSL
+
+
def _implementation():
    """Return a dict with the Python implementation and version.

    Provide both the name and the version of the Python implementation
    currently running. For example, on CPython 3.10.3 it will return
    {'name': 'CPython', 'version': '3.10.3'}.

    This function works best on CPython and PyPy: in particular, it probably
    doesn't work for Jython or IronPython. Future investigation should be done
    to work out the correct shape of the code for those platforms.
    """
    name = platform.python_implementation()

    if name == "CPython":
        version = platform.python_version()
    elif name == "PyPy":
        # sys.pypy_version_info only exists on PyPy, guarded by the branch.
        pypy = sys.pypy_version_info
        version = f"{pypy.major}.{pypy.minor}.{pypy.micro}"
        if pypy.releaselevel != "final":
            version += pypy.releaselevel
    elif name in ("Jython", "IronPython"):
        version = platform.python_version()  # Complete Guess
    else:
        version = "Unknown"

    return {"name": name, "version": version}
+
+
def info():
    """Generate information for a bug report.

    Collects platform, interpreter, TLS and dependency-version details into a
    plain dict suitable for JSON serialization.
    """
    try:
        platform_info = {
            "system": platform.system(),
            "release": platform.release(),
        }
    except OSError:
        # platform queries can fail on exotic systems; degrade gracefully.
        platform_info = {
            "system": "Unknown",
            "release": "Unknown",
        }

    implementation_info = _implementation()
    urllib3_info = {"version": urllib3.__version__}
    charset_normalizer_info = {"version": None}
    chardet_info = {"version": None}
    if charset_normalizer:
        charset_normalizer_info = {"version": charset_normalizer.__version__}
    if chardet:
        chardet_info = {"version": chardet.__version__}

    pyopenssl_info = {
        "version": None,
        "openssl_version": "",
    }
    if OpenSSL:
        pyopenssl_info = {
            "version": OpenSSL.__version__,
            "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
        }
    cryptography_info = {
        "version": getattr(cryptography, "__version__", ""),
    }
    idna_info = {
        "version": getattr(idna, "__version__", ""),
    }

    system_ssl = ssl.OPENSSL_VERSION_NUMBER
    system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}

    return {
        "platform": platform_info,
        "implementation": implementation_info,
        "system_ssl": system_ssl_info,
        "using_pyopenssl": pyopenssl is not None,
        # NOTE(review): this reports charset_normalizer usage as "chardet is
        # absent" — presumably requests falls back to chardet when installed;
        # confirm against requests' charset detection logic.
        "using_charset_normalizer": chardet is None,
        "pyOpenSSL": pyopenssl_info,
        "urllib3": urllib3_info,
        "chardet": chardet_info,
        "charset_normalizer": charset_normalizer_info,
        "cryptography": cryptography_info,
        "idna": idna_info,
        "requests": {
            "version": requests_version,
        },
    }
+
+
def main():
    """Pretty-print the bug information as JSON."""
    # sort_keys gives stable output for easy diffing between reports.
    print(json.dumps(info(), sort_keys=True, indent=2))
+
+
+if __name__ == "__main__":
+ main()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/hooks.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/hooks.py"
new file mode 100644
index 0000000..d181ba2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/hooks.py"
@@ -0,0 +1,33 @@
+"""
+requests.hooks
+~~~~~~~~~~~~~~
+
+This module provides the capabilities for the Requests hooks system.
+
+Available hooks:
+
+``response``:
+ The response generated from a Request.
+"""
HOOKS = ["response"]


def default_hooks():
    """Return a fresh hook registry: one empty list per supported event."""
    return {hook_event: [] for hook_event in HOOKS}
+
+
+# TODO: response is the only one
+
+
def dispatch_hook(key, hooks, hook_data, **kwargs):
    """Dispatches a hook dictionary on a given piece of data.

    :param key: hook event name (e.g. ``"response"``).
    :param hooks: mapping of event name to a hook callable or a list of hook
        callables; may be ``None`` or empty.
    :param hook_data: the data threaded through each hook in turn.
    :param kwargs: extra keyword arguments forwarded to every hook.
    :return: the (possibly transformed) ``hook_data``.
    """
    hooks = (hooks or {}).get(key)
    if hooks:
        # A single bare callable is accepted as shorthand for a one-item list.
        if callable(hooks):
            hooks = [hooks]
        for hook in hooks:
            _hook_data = hook(hook_data, **kwargs)
            # A hook returning None leaves the data unchanged.
            if _hook_data is not None:
                hook_data = _hook_data
    return hook_data
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/models.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/models.py"
new file mode 100644
index 0000000..c4b25fa
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/models.py"
@@ -0,0 +1,1039 @@
+"""
+requests.models
+~~~~~~~~~~~~~~~
+
+This module contains the primary objects that power Requests.
+"""
+
+import datetime
+
+# Import encoding now, to avoid implicit import later.
+# Implicit import within threads may cause LookupError when standard library is in a ZIP,
+# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
+import encodings.idna # noqa: F401
+from io import UnsupportedOperation
+
+from urllib3.exceptions import (
+ DecodeError,
+ LocationParseError,
+ ProtocolError,
+ ReadTimeoutError,
+ SSLError,
+)
+from urllib3.fields import RequestField
+from urllib3.filepost import encode_multipart_formdata
+from urllib3.util import parse_url
+
+from ._internal_utils import to_native_string, unicode_is_ascii
+from .auth import HTTPBasicAuth
+from .compat import (
+ Callable,
+ JSONDecodeError,
+ Mapping,
+ basestring,
+ builtin_str,
+ chardet,
+ cookielib,
+)
+from .compat import json as complexjson
+from .compat import urlencode, urlsplit, urlunparse
+from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
+from .exceptions import (
+ ChunkedEncodingError,
+ ConnectionError,
+ ContentDecodingError,
+ HTTPError,
+ InvalidJSONError,
+ InvalidURL,
+)
+from .exceptions import JSONDecodeError as RequestsJSONDecodeError
+from .exceptions import MissingSchema
+from .exceptions import SSLError as RequestsSSLError
+from .exceptions import StreamConsumedError
+from .hooks import default_hooks
+from .status_codes import codes
+from .structures import CaseInsensitiveDict
+from .utils import (
+ check_header_validity,
+ get_auth_from_url,
+ guess_filename,
+ guess_json_utf,
+ iter_slices,
+ parse_header_links,
+ requote_uri,
+ stream_decode_response_unicode,
+ super_len,
+ to_key_val_list,
+)
+
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,  # 301
    codes.found,  # 302
    codes.other,  # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

#: Maximum number of redirects followed by default (enforced by the session
#: machinery elsewhere in requests).
DEFAULT_REDIRECT_LIMIT = 30
#: Default chunk size, in bytes, when reading response content.
CONTENT_CHUNK_SIZE = 10 * 1024
#: Default chunk size, in bytes, for iterating response data.
ITER_CHUNK_SIZE = 512
+
+
class RequestEncodingMixin:
    """Mixin providing body/URL encoding helpers for prepared requests."""

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        # An empty path still needs the root "/" for a valid request line.
        path = p.path
        if not path:
            path = "/"

        url.append(path)

        query = p.query
        if query:
            url.append("?")
            url.append(query)

        return "".join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        # Strings/bytes and file-like objects pass through untouched.
        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, "read"):
            return data
        elif hasattr(data, "__iter__"):
            result = []
            for k, vs in to_key_val_list(data):
                # A scalar value is treated as a one-element list.
                if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (
                                k.encode("utf-8") if isinstance(k, str) else k,
                                v.encode("utf-8") if isinstance(v, str) else v,
                            )
                        )
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        """
        if not files:
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # Regular form fields come first, encoded as (name, bytes) pairs.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, "__iter__"):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (
                            field.decode("utf-8")
                            if isinstance(field, bytes)
                            else field,
                            v.encode("utf-8") if isinstance(v, str) else v,
                        )
                    )

        for k, v in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            # Accept raw bytes/str, file-like objects, or skip None entries.
            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, "read"):
                fdata = fp.read()
            elif fp is None:
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
+
+
class RequestHooksMixin:
    """Mixin providing hook registration/removal on top of a ``hooks`` dict."""

    def register_hook(self, event, hook):
        """Attach *hook* — a callable, or an iterable of callables — to *event*."""
        if event not in self.hooks:
            raise ValueError(f'Unsupported event specified, with event name "{event}"')

        registered = self.hooks[event]
        if isinstance(hook, Callable):
            registered.append(hook)
        elif hasattr(hook, "__iter__"):
            # Non-callable entries in an iterable are silently skipped.
            registered.extend(h for h in hook if isinstance(h, Callable))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.

        Returns True if the hook existed, False if not.
        """
        try:
            self.hooks[event].remove(hook)
        except ValueError:
            return False
        return True
+
+
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

        >>> import requests
        >>> req = requests.Request('GET', 'https://httpbin.org/get')
        >>> req.prepare()
        <PreparedRequest [GET]>
    """

    def __init__(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        # Normalize None containers to fresh empty defaults; mutable defaults
        # are created per call here, never in the signature.
        data = data if data is not None else []
        files = files if files is not None else []
        headers = headers if headers is not None else {}
        params = params if params is not None else {}
        hooks = hooks if hooks is not None else {}

        self.hooks = default_hooks()
        for hook_event, hook in list(hooks.items()):
            self.register_hook(event=hook_event, hook=hook)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return f"<Request [{self.method}]>"

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
+
+
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Instances are generated from a :class:`Request <Request>` object, and
    should not be instantiated manually; doing so may produce undesirable
    effects.

    Usage::

        >>> import requests
        >>> req = requests.Request('GET', 'https://httpbin.org/get')
        >>> r = req.prepare()
        >>> r
        <PreparedRequest [GET]>

        >>> s = requests.Session()
        >>> s.send(r)
        <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        """Prepares the entire request with the given parameters.

        Order matters: each prepare_* step below may depend on state set by
        the previous ones (e.g. prepare_body reads self.headers).
        """

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return f"<PreparedRequest [{self.method}]>"

    def copy(self):
        """Return a shallow copy of this PreparedRequest.

        Note: headers and cookies are copied, but the hooks dict and body
        object are shared with the original.
        """
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method (uppercased, native string)."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        # Imported lazily: idna is only needed for non-ASCII hostnames.
        import idna

        try:
            host = idna.encode(host, uts46=True).decode("utf-8")
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode("utf8")
        else:
            url = str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ":" in url and not url.lower().startswith("http"):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            raise MissingSchema(
                f"Invalid URL {url!r}: No scheme supplied. "
                f"Perhaps you meant https://{url}?"
            )

        if not host:
            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL("URL has an invalid label.")
        elif host.startswith(("*", ".")):
            raise InvalidURL("URL has an invalid label.")

        # Carefully reconstruct the network location
        netloc = auth or ""
        if netloc:
            netloc += "@"
        netloc += host
        if port:
            netloc += f":{port}"

        # Bare domains aren't valid URLs.
        if not path:
            path = "/"

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        # Merge caller-supplied params into any query string already in the URL.
        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = f"{query}&{enc_params}"
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = "application/json"

            try:
                body = complexjson.dumps(json, allow_nan=False)
            except ValueError as ve:
                raise InvalidJSONError(ve, request=self)

            if not isinstance(body, bytes):
                body = body.encode("utf-8")

        # A "stream" is any iterable that isn't a string, list, tuple or mapping.
        is_stream = all(
            [
                hasattr(data, "__iter__"),
                not isinstance(data, (basestring, list, tuple, Mapping)),
            ]
        )

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, "tell", None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except OSError:
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError(
                    "Streamed bodies and files are mutually exclusive."
                )

            if length:
                self.headers["Content-Length"] = builtin_str(length)
            else:
                self.headers["Transfer-Encoding"] = "chunked"
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, "read"):
                        content_type = None
                    else:
                        content_type = "application/x-www-form-urlencoded"

            self.prepare_content_length(body)

        # Add content-type if it wasn't explicitly provided.
        if content_type and ("content-type" not in self.headers):
            self.headers["Content-Type"] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers["Content-Length"] = builtin_str(length)
        elif (
            self.method not in ("GET", "HEAD")
            and self.headers.get("Content-Length") is None
        ):
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers["Content-Length"] = "0"

    def prepare_auth(self, auth, url=""):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers["Cookie"] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])
+
+
+class Response:
+ """The :class:`Response <Response>` object, which contains a
+ server's response to an HTTP request.
+ """
+
+ __attrs__ = [
+ "_content",
+ "status_code",
+ "headers",
+ "url",
+ "history",
+ "encoding",
+ "reason",
+ "cookies",
+ "elapsed",
+ "request",
+ ]
+
    def __init__(self):
        """Initialize an empty Response with default attribute values."""
        # False is the "not yet read" sentinel for the cached body bytes.
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        #: This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def __getstate__(self):
+ # Consume everything; accessing the content attribute makes
+ # sure the content has been fully read.
+ if not self._content_consumed:
+ self.content
+
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
+ def __setstate__(self, state):
+ for name, value in state.items():
+ setattr(self, name, value)
+
+ # pickled objects do not have .raw
+ setattr(self, "_content_consumed", True)
+ setattr(self, "raw", None)
+
+ def __repr__(self):
+ return f"<Response [{self.status_code}]>"
+
+ def __bool__(self):
+ """Returns True if :attr:`status_code` is less than 400.
+
+ This attribute checks if the status code of the response is between
+ 400 and 600 to see if there was a client error or a server error. If
+ the status code, is between 200 and 400, this will return True. This
+ is **not** a check to see if the response code is ``200 OK``.
+ """
+ return self.ok
+
+ def __nonzero__(self):
+ """Returns True if :attr:`status_code` is less than 400.
+
+ This attribute checks if the status code of the response is between
+ 400 and 600 to see if there was a client error or a server error. If
+ the status code, is between 200 and 400, this will return True. This
+ is **not** a check to see if the response code is ``200 OK``.
+ """
+ return self.ok
+
+ def __iter__(self):
+ """Allows you to use a response as an iterator."""
+ return self.iter_content(128)
+
+ @property
+ def ok(self):
+ """Returns True if :attr:`status_code` is less than 400, False if not.
+
+ This attribute checks if the status code of the response is between
+ 400 and 600 to see if there was a client error or a server error. If
+ the status code is between 200 and 400, this will return True. This
+ is **not** a check to see if the response code is ``200 OK``.
+ """
+ try:
+ self.raise_for_status()
+ except HTTPError:
+ return False
+ return True
+
+ @property
+ def is_redirect(self):
+ """True if this Response is a well-formed HTTP redirect that could have
+ been processed automatically (by :meth:`Session.resolve_redirects`).
+ """
+ return "location" in self.headers and self.status_code in REDIRECT_STATI
+
+ @property
+ def is_permanent_redirect(self):
+ """True if this Response one of the permanent versions of redirect."""
+ return "location" in self.headers and self.status_code in (
+ codes.moved_permanently,
+ codes.permanent_redirect,
+ )
+
+ @property
+ def next(self):
+ """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
+ return self._next
+
+ @property
+ def apparent_encoding(self):
+ """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
+ if chardet is not None:
+ return chardet.detect(self.content)["encoding"]
+ else:
+ # If no character detection library is available, we'll fall back
+ # to a standard Python utf-8 str.
+ return "utf-8"
+
+ def iter_content(self, chunk_size=1, decode_unicode=False):
+ """Iterates over the response data. When stream=True is set on the
+ request, this avoids reading the content at once into memory for
+ large responses. The chunk size is the number of bytes it should
+ read into memory. This is not necessarily the length of each item
+ returned as decoding can take place.
+
+ chunk_size must be of type int or None. A value of None will
+ function differently depending on the value of `stream`.
+ stream=True will read data as it arrives in whatever size the
+ chunks are received. If stream=False, data is returned as
+ a single chunk.
+
+ If decode_unicode is True, content will be decoded using the best
+ available encoding based on the response.
+ """
+
+ def generate():
+ # Special case for urllib3.
+ if hasattr(self.raw, "stream"):
+ try:
+ yield from self.raw.stream(chunk_size, decode_content=True)
+ except ProtocolError as e:
+ raise ChunkedEncodingError(e)
+ except DecodeError as e:
+ raise ContentDecodingError(e)
+ except ReadTimeoutError as e:
+ raise ConnectionError(e)
+ except SSLError as e:
+ raise RequestsSSLError(e)
+ else:
+ # Standard file-like object.
+ while True:
+ chunk = self.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ self._content_consumed = True
+
+ if self._content_consumed and isinstance(self._content, bool):
+ raise StreamConsumedError()
+ elif chunk_size is not None and not isinstance(chunk_size, int):
+ raise TypeError(
+ f"chunk_size must be an int, it is instead a {type(chunk_size)}."
+ )
+ # simulate reading small chunks of the content
+ reused_chunks = iter_slices(self._content, chunk_size)
+
+ stream_chunks = generate()
+
+ chunks = reused_chunks if self._content_consumed else stream_chunks
+
+ if decode_unicode:
+ chunks = stream_decode_response_unicode(chunks, self)
+
+ return chunks
+
+ def iter_lines(
+ self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
+ ):
+ """Iterates over the response data, one line at a time. When
+ stream=True is set on the request, this avoids reading the
+ content at once into memory for large responses.
+
+ .. note:: This method is not reentrant safe.
+ """
+
+ pending = None
+
+ for chunk in self.iter_content(
+ chunk_size=chunk_size, decode_unicode=decode_unicode
+ ):
+ if pending is not None:
+ chunk = pending + chunk
+
+ if delimiter:
+ lines = chunk.split(delimiter)
+ else:
+ lines = chunk.splitlines()
+
+ if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+ pending = lines.pop()
+ else:
+ pending = None
+
+ yield from lines
+
+ if pending is not None:
+ yield pending
+
+ @property
+ def content(self):
+ """Content of the response, in bytes."""
+
+ if self._content is False:
+ # Read the contents.
+ if self._content_consumed:
+ raise RuntimeError("The content for this response was already consumed")
+
+ if self.status_code == 0 or self.raw is None:
+ self._content = None
+ else:
+ self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
+
+ self._content_consumed = True
+ # don't need to release the connection; that's been handled by urllib3
+ # since we exhausted the data.
+ return self._content
+
+ @property
+ def text(self):
+ """Content of the response, in unicode.
+
+ If Response.encoding is None, encoding will be guessed using
+ ``charset_normalizer`` or ``chardet``.
+
+ The encoding of the response content is determined based solely on HTTP
+ headers, following RFC 2616 to the letter. If you can take advantage of
+ non-HTTP knowledge to make a better guess at the encoding, you should
+ set ``r.encoding`` appropriately before accessing this property.
+ """
+
+ # Try charset from content-type
+ content = None
+ encoding = self.encoding
+
+ if not self.content:
+ return ""
+
+ # Fallback to auto-detected encoding.
+ if self.encoding is None:
+ encoding = self.apparent_encoding
+
+ # Decode unicode from given encoding.
+ try:
+ content = str(self.content, encoding, errors="replace")
+ except (LookupError, TypeError):
+ # A LookupError is raised if the encoding was not found which could
+ # indicate a misspelling or similar mistake.
+ #
+ # A TypeError can be raised if encoding is None
+ #
+ # So we try blindly encoding.
+ content = str(self.content, errors="replace")
+
+ return content
+
+ def json(self, **kwargs):
+ r"""Decodes the JSON response body (if any) as a Python object.
+
+ This may return a dictionary, list, etc. depending on what is in the response.
+
+ :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
+ :raises requests.exceptions.JSONDecodeError: If the response body does not
+ contain valid json.
+ """
+
+ if not self.encoding and self.content and len(self.content) > 3:
+ # No encoding set. JSON RFC 4627 section 3 states we should expect
+ # UTF-8, -16 or -32. Detect which one to use; If the detection or
+ # decoding fails, fall back to `self.text` (using charset_normalizer to make
+ # a best guess).
+ encoding = guess_json_utf(self.content)
+ if encoding is not None:
+ try:
+ return complexjson.loads(self.content.decode(encoding), **kwargs)
+ except UnicodeDecodeError:
+ # Wrong UTF codec detected; usually because it's not UTF-8
+ # but some other 8-bit codec. This is an RFC violation,
+ # and the server didn't bother to tell us what codec *was*
+ # used.
+ pass
+ except JSONDecodeError as e:
+ raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
+
+ try:
+ return complexjson.loads(self.text, **kwargs)
+ except JSONDecodeError as e:
+ # Catch JSON-related errors and raise as requests.JSONDecodeError
+ # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
+ raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
+
+ @property
+ def links(self):
+ """Returns the parsed header links of the response, if any."""
+
+ header = self.headers.get("link")
+
+ resolved_links = {}
+
+ if header:
+ links = parse_header_links(header)
+
+ for link in links:
+ key = link.get("rel") or link.get("url")
+ resolved_links[key] = link
+
+ return resolved_links
+
+ def raise_for_status(self):
+ """Raises :class:`HTTPError`, if one occurred."""
+
+ http_error_msg = ""
+ if isinstance(self.reason, bytes):
+ # We attempt to decode utf-8 first because some servers
+ # choose to localize their reason strings. If the string
+ # isn't utf-8, we fall back to iso-8859-1 for all other
+ # encodings. (See PR #3538)
+ try:
+ reason = self.reason.decode("utf-8")
+ except UnicodeDecodeError:
+ reason = self.reason.decode("iso-8859-1")
+ else:
+ reason = self.reason
+
+ if 400 <= self.status_code < 500:
+ http_error_msg = (
+ f"{self.status_code} Client Error: {reason} for url: {self.url}"
+ )
+
+ elif 500 <= self.status_code < 600:
+ http_error_msg = (
+ f"{self.status_code} Server Error: {reason} for url: {self.url}"
+ )
+
+ if http_error_msg:
+ raise HTTPError(http_error_msg, response=self)
+
+ def close(self):
+ """Releases the connection back to the pool. Once this method has been
+ called the underlying ``raw`` object must not be accessed again.
+
+ *Note: Should not normally need to be called explicitly.*
+ """
+ if not self._content_consumed:
+ self.raw.close()
+
+ release_conn = getattr(self.raw, "release_conn", None)
+ if release_conn is not None:
+ release_conn()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/packages.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/packages.py"
new file mode 100644
index 0000000..5ab3d8e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/packages.py"
@@ -0,0 +1,23 @@
import sys

from .compat import chardet

# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

# Alias urllib3 and idna (and every already-imported submodule of each)
# under the historical ``requests.packages.*`` namespace so that legacy
# ``requests.packages.urllib3`` imports keep resolving to the same objects.
for package in ("urllib3", "idna"):
    locals()[package] = __import__(package)
    # This traversal is apparently necessary such that the identities are
    # preserved (requests.packages.urllib3.* is urllib3.*)
    for mod in list(sys.modules):
        if mod == package or mod.startswith(f"{package}."):
            sys.modules[f"requests.packages.{mod}"] = sys.modules[mod]

# ``chardet`` here may actually be a different detector module (it is whatever
# .compat resolved), so alias it under both its real name and "chardet".
if chardet is not None:
    target = chardet.__name__
    for mod in list(sys.modules):
        if mod == target or mod.startswith(f"{target}."):
            imported_mod = sys.modules[mod]
            sys.modules[f"requests.packages.{mod}"] = imported_mod
            # NOTE(review): str.replace substitutes every occurrence of
            # ``target`` in the dotted path — assumed harmless for the module
            # names involved; confirm if ``target`` can appear mid-path.
            mod = mod.replace(target, "chardet")
            sys.modules[f"requests.packages.{mod}"] = imported_mod
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/sessions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/sessions.py"
new file mode 100644
index 0000000..731550d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/sessions.py"
@@ -0,0 +1,831 @@
+"""
+requests.sessions
+~~~~~~~~~~~~~~~~~
+
+This module provides a Session object to manage and persist settings across
+requests (cookies, auth, proxies).
+"""
+import os
+import sys
+import time
+from collections import OrderedDict
+from datetime import timedelta
+
+from ._internal_utils import to_native_string
+from .adapters import HTTPAdapter
+from .auth import _basic_auth_str
+from .compat import Mapping, cookielib, urljoin, urlparse
+from .cookies import (
+ RequestsCookieJar,
+ cookiejar_from_dict,
+ extract_cookies_to_jar,
+ merge_cookies,
+)
+from .exceptions import (
+ ChunkedEncodingError,
+ ContentDecodingError,
+ InvalidSchema,
+ TooManyRedirects,
+)
+from .hooks import default_hooks, dispatch_hook
+
+# formerly defined here, reexposed here for backward compatibility
+from .models import ( # noqa: F401
+ DEFAULT_REDIRECT_LIMIT,
+ REDIRECT_STATI,
+ PreparedRequest,
+ Request,
+)
+from .status_codes import codes
+from .structures import CaseInsensitiveDict
+from .utils import ( # noqa: F401
+ DEFAULT_PORTS,
+ default_headers,
+ get_auth_from_url,
+ get_environ_proxies,
+ get_netrc_auth,
+ requote_uri,
+ resolve_proxies,
+ rewind_body,
+ should_bypass_proxies,
+ to_key_val_list,
+)
+
# Preferred clock, based on which one is more accurate on a given system.
preferred_clock = time.perf_counter if sys.platform == "win32" else time.time
+
+
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """Compute the effective setting for a request.

    The request-level value takes precedence over the session-level one.
    When both sides are mappings they are merged into a ``dict_class``,
    and any key whose merged value is ``None`` is removed from the result.
    """
    # With only one side present there is nothing to merge.
    if session_setting is None:
        return request_setting
    if request_setting is None:
        return session_setting

    # Non-mapping values (e.g. a bool ``verify``) cannot be merged;
    # the request-level value simply wins.
    if not (
        isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping)
    ):
        return request_setting

    merged = dict_class(to_key_val_list(session_setting))
    merged.update(to_key_val_list(request_setting))

    # Setting a key to None is how callers delete an inherited value.
    # Collect the keys first so the mapping is not mutated mid-iteration.
    for key in [k for (k, v) in merged.items() if v is None]:
        del merged[key]

    return merged
+
+
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """Merge request-level and session-level hook registries.

    A registry that is ``None`` or whose ``'response'`` entry is an empty
    list is treated as absent; merging ``{'response': []}`` naively would
    otherwise wipe out the other side's hooks entirely.
    """

    def _is_blank(hooks):
        return hooks is None or hooks.get("response") == []

    if _is_blank(session_hooks):
        return request_hooks
    if _is_blank(request_hooks):
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)
+
+
class SessionRedirectMixin:
    """Redirect-following behaviour mixed into :class:`Session`."""

    def get_redirect_target(self, resp):
        """Receives a Response. Returns a redirect URI or ``None``"""
        # Due to the nature of how requests processes redirects this method will
        # be called at least once upon the original response and at least twice
        # on each subsequent redirect response (if any).
        # If a custom mixin is used to handle this logic, it may be advantageous
        # to cache the redirect location onto the response object as a private
        # attribute.
        if resp.is_redirect:
            location = resp.headers["location"]
            # Currently the underlying http module on py3 decode headers
            # in latin1, but empirical evidence suggests that latin1 is very
            # rarely used with non-ASCII characters in HTTP headers.
            # It is more likely to get UTF8 header rather than latin1.
            # This causes incorrect handling of UTF8 encoded location headers.
            # To solve this, we re-encode the location in latin1.
            location = location.encode("latin1")
            return to_native_string(location, "utf8")
        return None

    def should_strip_auth(self, old_url, new_url):
        """Decide whether Authorization header should be removed when redirecting"""
        old_parsed = urlparse(old_url)
        new_parsed = urlparse(new_url)
        # Different host is always a credential boundary.
        if old_parsed.hostname != new_parsed.hostname:
            return True
        # Special case: allow http -> https redirect when using the standard
        # ports. This isn't specified by RFC 7235, but is kept to avoid
        # breaking backwards compatibility with older versions of requests
        # that allowed any redirects on the same host.
        if (
            old_parsed.scheme == "http"
            and old_parsed.port in (80, None)
            and new_parsed.scheme == "https"
            and new_parsed.port in (443, None)
        ):
            return False

        # Handle default port usage corresponding to scheme.
        changed_port = old_parsed.port != new_parsed.port
        changed_scheme = old_parsed.scheme != new_parsed.scheme
        default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
        if (
            not changed_scheme
            and old_parsed.port in default_port
            and new_parsed.port in default_port
        ):
            return False

        # Standard case: root URI must match
        return changed_port or changed_scheme

    def resolve_redirects(
        self,
        resp,
        req,
        stream=False,
        timeout=None,
        verify=True,
        cert=None,
        proxies=None,
        yield_requests=False,
        **adapter_kwargs,
    ):
        """Receives a Response. Returns a generator of Responses or Requests.

        When ``yield_requests`` is true the prepared requests are yielded
        without being sent; otherwise each redirect is sent and its
        :class:`Response` is yielded.
        """

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        previous_fragment = urlparse(req.url).fragment
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects(
                    f"Exceeded {self.max_redirects} redirects.", response=resp
                )

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith("//"):
                parsed_rurl = urlparse(resp.url)
                url = ":".join([to_native_string(parsed_rurl.scheme), url])

            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
            parsed = urlparse(url)
            if parsed.fragment == "" and previous_fragment:
                parsed = parsed._replace(fragment=previous_fragment)
            elif parsed.fragment:
                previous_fragment = parsed.fragment
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/psf/requests/issues/1084
            if resp.status_code not in (
                codes.temporary_redirect,
                codes.permanent_redirect,
            ):
                # https://github.com/psf/requests/issues/3490
                purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding")
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            headers.pop("Cookie", None)

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = prepared_request._body_position is not None and (
                "Content-Length" in headers or "Transfer-Encoding" in headers
            )

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            if yield_requests:
                yield req
            else:
                resp = self.send(
                    req,
                    stream=stream,
                    timeout=timeout,
                    verify=verify,
                    cert=cert,
                    proxies=proxies,
                    allow_redirects=False,
                    **adapter_kwargs,
                )

                extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
                yield resp

    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        if "Authorization" in headers and self.should_strip_auth(
            response.request.url, url
        ):
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            del headers["Authorization"]

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        headers = prepared_request.headers
        scheme = urlparse(prepared_request.url).scheme
        new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)

        if "Proxy-Authorization" in headers:
            del headers["Proxy-Authorization"]

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        # urllib3 handles proxy authorization for us in the standard adapter.
        # Avoid appending this to TLS tunneled requests where it may be leaked.
        if not scheme.startswith("https") and username and password:
            headers["Proxy-Authorization"] = _basic_auth_str(username, password)

        return new_proxies

    def rebuild_method(self, prepared_request, response):
        """When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = prepared_request.method

        # https://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.see_other and method != "HEAD":
            method = "GET"

        # Do what the browsers do, despite standards...
        # First, turn 302s into GETs.
        if response.status_code == codes.found and method != "HEAD":
            method = "GET"

        # Second, if a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in Issue 1704.
        if response.status_code == codes.moved and method == "POST":
            method = "GET"

        prepared_request.method = method
+
+
+class Session(SessionRedirectMixin):
+ """A Requests session.
+
+ Provides cookie persistence, connection-pooling, and configuration.
+
+ Basic Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> s.get('https://httpbin.org/get')
+ <Response [200]>
+
+ Or as a context manager::
+
+ >>> with requests.Session() as s:
+ ... s.get('https://httpbin.org/get')
+ <Response [200]>
+ """
+
+ __attrs__ = [
+ "headers",
+ "cookies",
+ "auth",
+ "proxies",
+ "hooks",
+ "params",
+ "verify",
+ "cert",
+ "adapters",
+ "stream",
+ "trust_env",
+ "max_redirects",
+ ]
+
    def __init__(self):
        """Initialize the session with the library's default configuration."""
        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
        #: be used on each :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        #: Defaults to `True`, requiring requests to verify the TLS certificate at the
        #: remote end.
        #: If verify is set to `False`, requests will accept any TLS certificate
        #: presented by the server, and will ignore hostname mismatches and/or
        #: expired certificates, which will make your application vulnerable to
        #: man-in-the-middle (MitM) attacks.
        #: Only set this to `False` for testing.
        self.verify = True

        #: SSL client certificate default, if String, path to ssl client
        #: cert file (.pem). If Tuple, ('cert', 'key') pair.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
        #: 30.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Trust environment settings for proxy configuration, default
        #: authentication and similar.
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters.
        self.adapters = OrderedDict()
        self.mount("https://", HTTPAdapter())
        self.mount("http://", HTTPAdapter())
+
    def __enter__(self):
        # Support ``with requests.Session() as s:`` usage.
        return self
+
    def __exit__(self, *args):
        # Delegate cleanup to close(); returning None means any exception
        # raised inside the ``with`` block still propagates.
        self.close()
+
+ def prepare_request(self, request):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for
+ transmission and returns it. The :class:`PreparedRequest` has settings
+ merged from the :class:`Request <Request>` instance and those of the
+ :class:`Session`.
+
+ :param request: :class:`Request` instance to prepare with this
+ session's settings.
+ :rtype: requests.PreparedRequest
+ """
+ cookies = request.cookies or {}
+
+ # Bootstrap CookieJar.
+ if not isinstance(cookies, cookielib.CookieJar):
+ cookies = cookiejar_from_dict(cookies)
+
+ # Merge with session cookies
+ merged_cookies = merge_cookies(
+ merge_cookies(RequestsCookieJar(), self.cookies), cookies
+ )
+
+ # Set environment's basic authentication if not explicitly set.
+ auth = request.auth
+ if self.trust_env and not auth and not self.auth:
+ auth = get_netrc_auth(request.url)
+
+ p = PreparedRequest()
+ p.prepare(
+ method=request.method.upper(),
+ url=request.url,
+ files=request.files,
+ data=request.data,
+ json=request.json,
+ headers=merge_setting(
+ request.headers, self.headers, dict_class=CaseInsensitiveDict
+ ),
+ params=merge_setting(request.params, self.params),
+ auth=merge_setting(auth, self.auth),
+ cookies=merged_cookies,
+ hooks=merge_hooks(request.hooks, self.hooks),
+ )
+ return p
+
+ def request(
+ self,
+ method,
+ url,
+ params=None,
+ data=None,
+ headers=None,
+ cookies=None,
+ files=None,
+ auth=None,
+ timeout=None,
+ allow_redirects=True,
+ proxies=None,
+ hooks=None,
+ stream=None,
+ verify=None,
+ cert=None,
+ json=None,
+ ):
+ """Constructs a :class:`Request <Request>`, prepares it and sends it.
+ Returns :class:`Response <Response>` object.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query
+ string for the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the
+ :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the
+ :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the
+ :class:`Request`.
+ :param files: (optional) Dictionary of ``'filename': file-like-objects``
+ for multipart encoding upload.
+ :param auth: (optional) Auth tuple or callable to enable
+ Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How many seconds to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Set to True by default.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol or protocol and
+ hostname to the URL of the proxy.
+ :param hooks: (optional) Dictionary mapping hook name to one event or
+ list of events, event must be callable.
+ :param stream: (optional) whether to immediately download the response
+ content. Defaults to ``False``.
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use. Defaults to ``True``. When set to
+ ``False``, requests will accept any TLS certificate presented by
+ the server, and will ignore hostname mismatches and/or expired
+ certificates, which will make your application vulnerable to
+ man-in-the-middle (MitM) attacks. Setting verify to ``False``
+ may be useful during local development or testing.
+ :param cert: (optional) if String, path to ssl client cert file (.pem).
+ If Tuple, ('cert', 'key') pair.
+ :rtype: requests.Response
+ """
+ # Create the Request.
+ req = Request(
+ method=method.upper(),
+ url=url,
+ headers=headers,
+ files=files,
+ data=data or {},
+ json=json,
+ params=params or {},
+ auth=auth,
+ cookies=cookies,
+ hooks=hooks,
+ )
+ prep = self.prepare_request(req)
+
+ proxies = proxies or {}
+
+ settings = self.merge_environment_settings(
+ prep.url, proxies, stream, verify, cert
+ )
+
+ # Send the request.
+ send_kwargs = {
+ "timeout": timeout,
+ "allow_redirects": allow_redirects,
+ }
+ send_kwargs.update(settings)
+ resp = self.send(prep, **send_kwargs)
+
+ return resp
+
+ def get(self, url, **kwargs):
+ r"""Sends a GET request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault("allow_redirects", True)
+ return self.request("GET", url, **kwargs)
+
def options(self, url, **kwargs):
    r"""Sends a OPTIONS request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Redirects are followed by default for OPTIONS.
    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = True
    return self.request("OPTIONS", url, **kwargs)
+
def head(self, url, **kwargs):
    r"""Sends a HEAD request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Unlike GET, HEAD does not follow redirects unless asked to.
    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = False
    return self.request("HEAD", url, **kwargs)
+
def post(self, url, data=None, json=None, **kwargs):
    r"""Sends a POST request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    response = self.request("POST", url, data=data, json=json, **kwargs)
    return response
+
def put(self, url, data=None, **kwargs):
    r"""Sends a PUT request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    response = self.request("PUT", url, data=data, **kwargs)
    return response
+
def patch(self, url, data=None, **kwargs):
    r"""Sends a PATCH request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    response = self.request("PATCH", url, data=data, **kwargs)
    return response
+
def delete(self, url, **kwargs):
    r"""Sends a DELETE request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """
    # Redirect behavior is left to ``request``'s own defaults.
    response = self.request("DELETE", url, **kwargs)
    return response
+
def send(self, request, **kwargs):
    """Send a given PreparedRequest.

    :param request: the :class:`PreparedRequest` to send; passing a plain
        :class:`Request` raises ``ValueError``.
    :param kwargs: transport options (``stream``, ``verify``, ``cert``,
        ``proxies``, ``timeout``, ``allow_redirects``); unset values are
        filled in from the session's defaults below.
    :rtype: requests.Response
    """
    # Set defaults that the hooks can utilize to ensure they always have
    # the correct parameters to reproduce the previous request.
    kwargs.setdefault("stream", self.stream)
    kwargs.setdefault("verify", self.verify)
    kwargs.setdefault("cert", self.cert)
    if "proxies" not in kwargs:
        kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env)

    # It's possible that users might accidentally send a Request object.
    # Guard against that specific failure case.
    if isinstance(request, Request):
        raise ValueError("You can only send PreparedRequests.")

    # Set up variables needed for resolve_redirects and dispatching of hooks
    allow_redirects = kwargs.pop("allow_redirects", True)
    stream = kwargs.get("stream")
    hooks = request.hooks

    # Get the appropriate adapter to use
    adapter = self.get_adapter(url=request.url)

    # Start time (approximately) of the request
    start = preferred_clock()

    # Send the request
    r = adapter.send(request, **kwargs)

    # Total elapsed time of the request (approximately)
    elapsed = preferred_clock() - start
    r.elapsed = timedelta(seconds=elapsed)

    # Response manipulation hooks
    r = dispatch_hook("response", hooks, r, **kwargs)

    # Persist cookies
    if r.history:
        # If the hooks create history then we want those cookies too
        for resp in r.history:
            extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

    extract_cookies_to_jar(self.cookies, request, r.raw)

    # Resolve redirects if allowed.
    if allow_redirects:
        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, **kwargs)
        history = [resp for resp in gen]
    else:
        history = []

    # Shuffle things around if there's history.
    if history:
        # Insert the first (original) request at the start
        history.insert(0, r)
        # Get the last request made
        r = history.pop()
        r.history = history

    # If redirects aren't being followed, store the response on the Request for Response.next().
    if not allow_redirects:
        try:
            # Compute (but do not send) the would-be next request so
            # callers can still inspect it.
            r._next = next(
                self.resolve_redirects(r, request, yield_requests=True, **kwargs)
            )
        except StopIteration:
            pass

    if not stream:
        # Eagerly consume the body so the connection can be reused.
        r.content

    return r
+
def merge_environment_settings(self, url, proxies, stream, verify, cert):
    """
    Check the environment and merge it with some settings.

    :rtype: dict
    """
    if self.trust_env:
        # Honour proxy configuration from the environment, without
        # overriding anything the caller supplied explicitly.
        no_proxy = None if proxies is None else proxies.get("no_proxy")
        for scheme, env_proxy in get_environ_proxies(url, no_proxy=no_proxy).items():
            proxies.setdefault(scheme, env_proxy)

        # Fall back to CA-bundle environment variables (cURL-compatible)
        # when the caller did not pin ``verify`` to a path or ``False``.
        # Identity checks are deliberate: ``verify == 1`` must not match.
        if verify is True or verify is None:
            verify = (
                os.environ.get("REQUESTS_CA_BUNDLE")
                or os.environ.get("CURL_CA_BUNDLE")
                or verify
            )

    # Request-level values win over session-level ones.
    merged = {
        "proxies": merge_setting(proxies, self.proxies),
        "stream": merge_setting(stream, self.stream),
        "verify": merge_setting(verify, self.verify),
        "cert": merge_setting(cert, self.cert),
    }
    return merged
+
def get_adapter(self, url):
    """
    Returns the appropriate connection adapter for the given URL.

    :rtype: requests.adapters.BaseAdapter
    """
    target = url.lower()
    # First registered match wins; mount() orders prefixes longest-first.
    for prefix, adapter in self.adapters.items():
        if target.startswith(prefix.lower()):
            return adapter

    # Nothing matches :-/
    raise InvalidSchema(f"No connection adapters were found for {url!r}")
+
def close(self):
    """Closes all adapters and as such the session"""
    for adapter in self.adapters.values():
        adapter.close()
+
def mount(self, prefix, adapter):
    """Registers a connection adapter to a prefix.

    Adapters are sorted in descending order by prefix length.
    """
    self.adapters[prefix] = adapter
    # Re-append every strictly shorter prefix so that longer (more
    # specific) prefixes are encountered first during lookup.
    for key in [k for k in self.adapters if len(k) < len(prefix)]:
        self.adapters[key] = self.adapters.pop(key)
+
def __getstate__(self):
    # Pickle support: capture exactly the attributes named in
    # ``__attrs__``, defaulting to None for any that are missing.
    return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
def __setstate__(self, state):
    # Pickle support: restore each captured attribute verbatim.
    for attr in state:
        setattr(self, attr, state[attr])
+
+
def session():
    """
    Return a new :class:`Session` (for context-management).

    .. deprecated:: 1.0.0

        Kept only for backwards compatibility; new code should construct
        :class:`~requests.sessions.Session` directly. This helper may be
        removed at a future date.

    :rtype: Session
    """
    return Session()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/status_codes.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/status_codes.py"
new file mode 100644
index 0000000..c7945a2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/status_codes.py"
@@ -0,0 +1,128 @@
+r"""
+The ``codes`` object defines a mapping from common names for HTTP statuses
+to their numerical codes, accessible either as attributes or as dictionary
+items.
+
+Example::
+
+ >>> import requests
+ >>> requests.codes['temporary_redirect']
+ 307
+ >>> requests.codes.teapot
+ 418
+ >>> requests.codes['\o/']
+ 200
+
+Some codes have multiple names, and both upper- and lower-case versions of
+the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
+``codes.okay`` all correspond to the HTTP status code 200.
+"""
+
+from .structures import LookupDict
+
# Mapping of HTTP status code -> tuple of human-friendly aliases.
# NOTE(review): a few aliases repeat across codes ("uri_too_long" for
# 122 and 414, "precondition" for 412 and 428); since _init() iterates
# in insertion order, the later code wins for the shared attribute.
_codes = {
    # Informational.
    100: ("continue",),
    101: ("switching_protocols",),
    102: ("processing", "early-hints"),
    103: ("checkpoint",),
    122: ("uri_too_long", "request_uri_too_long"),
    200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"),
    201: ("created",),
    202: ("accepted",),
    203: ("non_authoritative_info", "non_authoritative_information"),
    204: ("no_content",),
    205: ("reset_content", "reset"),
    206: ("partial_content", "partial"),
    207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"),
    208: ("already_reported",),
    226: ("im_used",),
    # Redirection.
    300: ("multiple_choices",),
    301: ("moved_permanently", "moved", "\\o-"),
    302: ("found",),
    303: ("see_other", "other"),
    304: ("not_modified",),
    305: ("use_proxy",),
    306: ("switch_proxy",),
    307: ("temporary_redirect", "temporary_moved", "temporary"),
    308: (
        "permanent_redirect",
        "resume_incomplete",
        "resume",
    ),  # "resume" and "resume_incomplete" to be removed in 3.0
    # Client Error.
    400: ("bad_request", "bad"),
    401: ("unauthorized",),
    402: ("payment_required", "payment"),
    403: ("forbidden",),
    404: ("not_found", "-o-"),
    405: ("method_not_allowed", "not_allowed"),
    406: ("not_acceptable",),
    407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
    408: ("request_timeout", "timeout"),
    409: ("conflict",),
    410: ("gone",),
    411: ("length_required",),
    412: ("precondition_failed", "precondition"),
    413: ("request_entity_too_large", "content_too_large"),
    414: ("request_uri_too_large", "uri_too_long"),
    415: ("unsupported_media_type", "unsupported_media", "media_type"),
    416: (
        "requested_range_not_satisfiable",
        "requested_range",
        "range_not_satisfiable",
    ),
    417: ("expectation_failed",),
    418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
    421: ("misdirected_request",),
    422: ("unprocessable_entity", "unprocessable", "unprocessable_content"),
    423: ("locked",),
    424: ("failed_dependency", "dependency"),
    425: ("unordered_collection", "unordered", "too_early"),
    426: ("upgrade_required", "upgrade"),
    428: ("precondition_required", "precondition"),
    429: ("too_many_requests", "too_many"),
    431: ("header_fields_too_large", "fields_too_large"),
    444: ("no_response", "none"),
    449: ("retry_with", "retry"),
    450: ("blocked_by_windows_parental_controls", "parental_controls"),
    451: ("unavailable_for_legal_reasons", "legal_reasons"),
    499: ("client_closed_request",),
    # Server Error.
    500: ("internal_server_error", "server_error", "/o\\", "✗"),
    501: ("not_implemented",),
    502: ("bad_gateway",),
    503: ("service_unavailable", "unavailable"),
    504: ("gateway_timeout",),
    505: ("http_version_not_supported", "http_version"),
    506: ("variant_also_negotiates",),
    507: ("insufficient_storage",),
    509: ("bandwidth_limit_exceeded", "bandwidth"),
    510: ("not_extended",),
    511: ("network_authentication_required", "network_auth", "network_authentication"),
}

# Attribute-style lookup object populated by _init() below.
codes = LookupDict(name="status_codes")
+
+
def _init():
    # Expose every alias (plus its upper-case twin, when the alias is a
    # plain identifier rather than ASCII art) as an attribute on ``codes``.
    for status, aliases in _codes.items():
        for alias in aliases:
            setattr(codes, alias, status)
            if not alias.startswith(("\\", "/")):
                setattr(codes, alias.upper(), status)

    def doc(code):
        names = ", ".join(f"``{n}``" for n in _codes[code])
        return "* %d: %s" % (code, names)

    # Append a generated per-code listing to the module docstring
    # (skipped when __doc__ is None, e.g. under ``python -OO``).
    global __doc__
    __doc__ = (
        __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
        if __doc__ is not None
        else None
    )


_init()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/structures.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/structures.py"
new file mode 100644
index 0000000..188e13e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/structures.py"
@@ -0,0 +1,99 @@
+"""
+requests.structures
+~~~~~~~~~~~~~~~~~~~
+
+Data structures that power Requests.
+"""
+
+from collections import OrderedDict
+
+from .compat import Mapping, MutableMapping
+
+
class CaseInsensitiveDict(MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    """

    def __init__(self, data=None, **kwargs):
        # Maps lowercased key -> (originally-cased key, value).
        self._store = OrderedDict()
        self.update({} if data is None else data, **kwargs)

    def __setitem__(self, key, value):
        # Keep the caller's casing alongside the value; the lowercased
        # form is only ever used as the lookup key.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        _cased, value = self._store[key.lower()]
        return value

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        # Yield the last-set casing of each key.
        for cased_key, _value in self._store.values():
            yield cased_key

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        for lower_key, (_cased, value) in self._store.items():
            yield (lower_key, value)

    def __eq__(self, other):
        if not isinstance(other, Mapping):
            return NotImplemented
        # Compare insensitively by normalizing both sides.
        other_ci = CaseInsensitiveDict(other)
        return dict(self.lower_items()) == dict(other_ci.lower_items())

    # Copy is required
    def copy(self):
        # _store.values() is an iterable of (cased key, value) pairs.
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))
+
+
class LookupDict(dict):
    """Dictionary lookup object.

    Item access reads instance attributes (``self.__dict__``) rather than
    the underlying dict storage, and missing keys yield ``None`` instead
    of raising ``KeyError``.
    """

    def __init__(self, name=None):
        super().__init__()
        self.name = name

    def __repr__(self):
        return f"<lookup '{self.name}'>"

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/utils.py"
new file mode 100644
index 0000000..8ab5585
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/requests/utils.py"
@@ -0,0 +1,1086 @@
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
+import contextlib
+import io
+import os
+import re
+import socket
+import struct
+import sys
+import tempfile
+import warnings
+import zipfile
+from collections import OrderedDict
+
+from urllib3.util import make_headers, parse_url
+
+from . import certs
+from .__version__ import __version__
+
+# to_native_string is unused here, but imported here for backwards compatibility
+from ._internal_utils import ( # noqa: F401
+ _HEADER_VALIDATORS_BYTE,
+ _HEADER_VALIDATORS_STR,
+ HEADER_VALIDATORS,
+ to_native_string,
+)
+from .compat import (
+ Mapping,
+ basestring,
+ bytes,
+ getproxies,
+ getproxies_environment,
+ integer_types,
+ is_urllib3_1,
+)
+from .compat import parse_http_list as _parse_list_header
+from .compat import (
+ proxy_bypass,
+ proxy_bypass_environment,
+ quote,
+ str,
+ unquote,
+ urlparse,
+ urlunparse,
+)
+from .cookies import cookiejar_from_dict
+from .exceptions import (
+ FileModeWarning,
+ InvalidHeader,
+ InvalidURL,
+ UnrewindableBodyError,
+)
+from .structures import CaseInsensitiveDict
+
# Candidate netrc file names probed in the user's home directory.
NETRC_FILES = (".netrc", "_netrc")

# Default CA bundle path, resolved once at import time via certs.where().
DEFAULT_CA_BUNDLE_PATH = certs.where()

# Standard port implied by each supported URL scheme.
DEFAULT_PORTS = {"http": 80, "https": 443}

# Ensure that ', ' is used to preserve previous delimiter behavior.
DEFAULT_ACCEPT_ENCODING = ", ".join(
    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
)
+
+
if sys.platform == "win32":
    # provide a proxy_bypass version on Windows without DNS lookups

    def proxy_bypass_registry(host):
        """Consult the Windows registry's Internet Settings to decide
        whether *host* should bypass the configured proxy."""
        try:
            import winreg
        except ImportError:
            return False

        try:
            internetSettings = winreg.OpenKey(
                winreg.HKEY_CURRENT_USER,
                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
            )
            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
            # ProxyOverride is almost always a string
            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
        except (OSError, ValueError):
            return False
        if not proxyEnable or not proxyOverride:
            return False

        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(";")
        # filter out empty strings to avoid re.match return true in the following code.
        proxyOverride = filter(None, proxyOverride)
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == "<local>":
                # "<local>" matches any dot-less (intranet-style) host name.
                if "." not in host:
                    return True
            test = test.replace(".", r"\.")  # mask dots
            test = test.replace("*", r".*")  # change glob sequence
            test = test.replace("?", r".")  # change glob char
            if re.match(test, host, re.I):
                return True
        return False

    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)
+
+
def dict_to_sequence(d):
    """Returns an internal sequence dictionary update.

    Mapping-like inputs are reduced to their ``items()``; everything else
    passes through unchanged.
    """
    return d.items() if hasattr(d, "items") else d
+
+
def super_len(o):
    """Best-effort count of the bytes remaining to be read from *o*.

    Understands sized objects (``__len__`` or a ``len`` attribute), real
    files (``fileno``/``fstat``) and seekable streams, subtracting the
    current ``tell()`` position when one is available. Falls back to 0
    when no length can be determined.
    """
    total_length = None
    current_position = 0

    if not is_urllib3_1 and isinstance(o, str):
        # urllib3 2.x+ treats all strings as utf-8 instead
        # of latin-1 (iso-8859-1) like http.client.
        o = o.encode("utf-8")

    if hasattr(o, "__len__"):
        total_length = len(o)

    elif hasattr(o, "len"):
        total_length = o.len

    elif hasattr(o, "fileno"):
        try:
            fileno = o.fileno()
        except (io.UnsupportedOperation, AttributeError):
            # AttributeError is a surprising exception, seeing as how we've just checked
            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
            # `Tarfile.extractfile()`, per issue 5229.
            pass
        else:
            total_length = os.fstat(fileno).st_size

            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if "b" not in o.mode:
                warnings.warn(
                    (
                        "Requests has determined the content-length for this "
                        "request using the binary size of the file: however, the "
                        "file has been opened in text mode (i.e. without the 'b' "
                        "flag in the mode). This may lead to an incorrect "
                        "content-length. In Requests 3.0, support will be removed "
                        "for files in text mode."
                    ),
                    FileModeWarning,
                )

    if hasattr(o, "tell"):
        try:
            current_position = o.tell()
        except OSError:
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            if total_length is not None:
                current_position = total_length
        else:
            if hasattr(o, "seek") and total_length is None:
                # StringIO and BytesIO have seek but no usable fileno
                try:
                    # seek to end of file
                    o.seek(0, 2)
                    total_length = o.tell()

                    # seek back to current position to support
                    # partially read file-like objects
                    o.seek(current_position or 0)
                except OSError:
                    total_length = 0

    if total_length is None:
        total_length = 0

    return max(0, total_length - current_position)
+
+
def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc.

    :param url: URL whose hostname is looked up in the netrc file.
    :param raise_errors: when True, re-raise netrc parse errors and
        file-permission problems instead of silently skipping netrc auth.
    :rtype: tuple (login, password), or None when nothing applies
    """

    # An explicit NETRC environment variable wins over the default
    # per-user file locations.
    netrc_file = os.environ.get("NETRC")
    if netrc_file is not None:
        netrc_locations = (netrc_file,)
    else:
        netrc_locations = (f"~/{f}" for f in NETRC_FILES)

    try:
        from netrc import NetrcParseError, netrc

        netrc_path = None

        for f in netrc_locations:
            loc = os.path.expanduser(f)
            if os.path.exists(loc):
                netrc_path = loc
                break

        # Abort early if there isn't one.
        if netrc_path is None:
            return

        ri = urlparse(url)
        host = ri.hostname

        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = 0 if _netrc[0] else 1
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, OSError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise

    # App Engine hackiness.
    except (ImportError, AttributeError):
        pass
+
+
def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, basestring):
        return None
    if name[0] == "<" or name[-1] == ">":
        # Pseudo names such as "<stdin>" are not real paths.
        return None
    return os.path.basename(name)
+
+
def extract_zipped_paths(path):
    """Replace nonexistent paths that look like they refer to a member of a zip
    archive with the location of an extracted copy of the target, or else
    just return the provided path unchanged.
    """
    if os.path.exists(path):
        # this is already a valid path, no need to do anything further
        return path

    # find the first valid part of the provided path and treat that as a zip archive
    # assume the rest of the path is the name of a member in the archive
    archive, member = os.path.split(path)
    while archive and not os.path.exists(archive):
        archive, prefix = os.path.split(archive)
        if not prefix:
            # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
            # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
            break
        member = "/".join([prefix, member])

    if not zipfile.is_zipfile(archive):
        return path

    zip_file = zipfile.ZipFile(archive)
    if member not in zip_file.namelist():
        return path

    # we have a valid zip archive and a valid member of that archive
    tmp = tempfile.gettempdir()
    # NOTE(review): only the member's basename is used here, so two distinct
    # members sharing a basename would reuse the same extracted file.
    extracted_path = os.path.join(tmp, member.split("/")[-1])
    if not os.path.exists(extracted_path):
        # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
        with atomic_open(extracted_path) as file_handler:
            file_handler.write(zip_file.read(member))
    return extracted_path
+
+
@contextlib.contextmanager
def atomic_open(filename):
    """Write a file to the disk in an atomic fashion.

    The payload is written to a temporary file in the same directory and
    renamed into place only on success; on any failure the temp file is
    removed and the exception re-raised.
    """
    tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
    try:
        handler = os.fdopen(tmp_descriptor, "wb")
        try:
            yield handler
        finally:
            handler.close()
        # Same-directory rename is atomic on POSIX and replaces on Windows.
        os.replace(tmp_name, filename)
    except BaseException:
        os.remove(tmp_name)
        raise
+
+
def from_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. Unless it can not be represented as such, return an
    OrderedDict, e.g.,

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    """
    if value is None:
        return None

    # Scalars cannot be interpreted as key/value pairs.
    scalar_types = (str, bytes, bool, int)
    if isinstance(value, scalar_types):
        raise ValueError("cannot encode objects that are not 2-tuples")

    return OrderedDict(value)
+
+
def to_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples

    :rtype: list
    """
    if value is None:
        return None

    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError("cannot encode objects that are not 2-tuples")

    pairs = value.items() if isinstance(value, Mapping) else value
    return list(pairs)
+
+
+# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings. A quoted-string could
    contain a comma. A non-quoted string could have quotes in the
    middle. Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    """

    def _strip(element):
        # Only surrounding double quotes are unwrapped; inner quotes stay.
        if element[:1] == element[-1:] == '"':
            return unquote_header_value(element[1:-1])
        return element

    return [_strip(element) for element in _parse_list_header(value)]
+
+
+# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    """
    pairs = {}
    for item in _parse_list_header(value):
        if "=" not in item:
            # Bare tokens map to None.
            pairs[item] = None
            continue
        name, _, raw = item.partition("=")
        if raw[:1] == raw[-1:] == '"':
            raw = unquote_header_value(raw[1:-1])
        pairs[name] = raw
    return pairs
+
+
+# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    :rtype: str
    """
    if value and value[0] == value[-1] == '"':
        # This is not RFC-compliant unquoting, but matches what browsers
        # actually send — IE for example uploads files with
        # "C:\foo\bar.txt" as filename.
        value = value[1:-1]

    # UNC filenames ("\\server\share") keep their leading double
    # backslash untouched; everything else gets escape sequences
    # collapsed. See #458.
    if is_filename and value[:2] == "\\\\":
        return value
    return value.replace("\\\\", "\\").replace('\\"', '"')
+
+
def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    # Later cookies with the same name overwrite earlier ones.
    return {cookie.name: cookie.value for cookie in cj}
+
+
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """
    # All the jar-aware merging logic lives in cookiejar_from_dict.
    return cookiejar_from_dict(cookie_dict, cj)
+
+
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    :param content: bytestring to extract encodings from.
    """
    warnings.warn(
        (
            "In requests 3.0, get_encodings_from_content will be removed. For "
            "more information, please see the discussion on issue #2266. (This"
            " warning should only appear once.)"
        ),
        DeprecationWarning,
    )

    # Probe, in order: <meta charset=...>, the legacy http-equiv pragma
    # form, and the XML declaration.
    patterns = (
        re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I),
        re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I),
        re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]'),
    )
    found = []
    for pattern in patterns:
        found.extend(pattern.findall(content))
    return found
+
+
def _parse_content_type_header(header):
    """Returns content type and parameters from given header

    :param header: string
    :return: tuple containing content type and dictionary of
        parameters
    """
    strip_chars = "\"' "
    content_type, _, remainder = header.partition(";")

    params_dict = {}
    for param in remainder.split(";"):
        param = param.strip()
        if not param:
            continue
        key, eq, raw_value = param.partition("=")
        if eq:
            # key=value parameter; quotes and blanks are trimmed.
            params_dict[key.strip(strip_chars).lower()] = raw_value.strip(strip_chars)
        else:
            # Bare flag parameter.
            params_dict[param.lower()] = True
    return content_type.strip(), params_dict
+
+
def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    """
    content_type = headers.get("content-type")
    if not content_type:
        return None

    content_type, params = _parse_content_type_header(content_type)

    if "charset" in params:
        # An explicit charset parameter always wins.
        return params["charset"].strip("'\"")
    if "text" in content_type:
        # HTTP's historical default for text/* bodies.
        return "ISO-8859-1"
    if "application/json" in content_type:
        # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
        return "utf-8"
    return None
+
+
def stream_decode_response_unicode(iterator, r):
    """Stream decodes an iterator of byte chunks using ``r.encoding``."""
    if r.encoding is None:
        # No encoding known: pass raw chunks through untouched.
        yield from iterator
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any bytes buffered from a trailing partial character.
    tail = decoder.decode(b"", final=True)
    if tail:
        yield tail
+
+
def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    # Non-positive or missing lengths mean "one slice with everything".
    if slice_length is None or slice_length <= 0:
        slice_length = len(string)
    if not string:
        # Guard: range() below would need a zero step for empty input.
        return
    for start in range(0, len(string), slice_length):
        yield string[start : start + slice_length]
+
+
def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn(
        (
            "In requests 3.0, get_unicode_from_response will be removed. For "
            "more information, please see the discussion on issue #2266. (This"
            " warning should only appear once.)"
        ),
        DeprecationWarning,
    )

    tried_encodings = []

    # Try charset from content-type
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Fall back:
    try:
        return str(r.content, encoding, errors="replace")
    except TypeError:
        # encoding is None here, so str() raises TypeError and the raw
        # bytes are returned unchanged.
        return r.content
+
+
# The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~"
)


def unquote_unreserved(uri):
    """Un-escape percent-escape sequences that encode unreserved characters.

    All reserved, illegal and non-ASCII bytes are left percent-encoded.

    :rtype: str
    """
    parts = uri.split("%")
    # parts[0] precedes the first '%' and is emitted verbatim.
    decoded = [parts[0]]
    for part in parts[1:]:
        hex_pair = part[:2]
        if len(hex_pair) != 2 or not hex_pair.isalnum():
            # Not a full two-character escape; keep the '%' literally.
            decoded.append(f"%{part}")
            continue
        try:
            char = chr(int(hex_pair, 16))
        except ValueError:
            raise InvalidURL(f"Invalid percent-escape sequence: '{hex_pair}'")
        if char in UNRESERVED_SET:
            decoded.append(char + part[2:])
        else:
            decoded.append(f"%{part}")
    return "".join(decoded)
+
+
def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    try:
        unquoted = unquote_unreserved(uri)
    except InvalidURL:
        # We couldn't unquote the given URI, so quote it as-is; there may
        # be bare '%'s in it, so '%' is excluded from the safe set here to
        # make sure they get escaped and cause no issues elsewhere.
        return quote(uri, safe="!#$&'()*+,/:;=?@[]~")
    # Quote only illegal characters (reserved, unreserved and '%' are
    # all left untouched).
    return quote(unquoted, safe="!#$%&'()*+,/:;=?@[]~")
+
+
def address_in_network(ip, net):
    """This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """

    def _to_long(dotted):
        # Dotted-quad string -> native-endian unsigned 32-bit integer.
        return struct.unpack("=L", socket.inet_aton(dotted))[0]

    netaddr, bits = net.split("/")
    netmask = _to_long(dotted_netmask(int(bits)))
    network = _to_long(netaddr) & netmask
    return (_to_long(ip) & netmask) == (network & netmask)
+
+
def dotted_netmask(mask):
    """Convert a prefix length (/xx format) to a dotted-quad netmask.

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    host_bits = 32 - mask
    # Clear the low host bits of an all-ones 32-bit word.
    netmask_value = 0xFFFFFFFF ^ ((1 << host_bits) - 1)
    return socket.inet_ntoa(struct.pack(">I", netmask_value))
+
+
def is_ipv4_address(string_ip):
    """Return True when *string_ip* parses as an IPv4 address.

    :rtype: bool
    """
    try:
        socket.inet_aton(string_ip)
        return True
    except OSError:
        return False
+
+
def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    # Exactly one '/' separates the address from the prefix length.
    if string_network.count("/") != 1:
        return False

    addr, _, mask_text = string_network.partition("/")
    try:
        mask = int(mask_text)
    except ValueError:
        return False

    if not 1 <= mask <= 32:
        return False

    try:
        socket.inet_aton(addr)
    except OSError:
        return False
    return True
+
+
@contextlib.contextmanager
def set_environ(env_name, value):
    """Set the environment variable 'env_name' to 'value'

    Save previous value, yield, and then restore the previous value stored in
    the environment variable 'env_name'.

    If 'value' is None, do nothing"""
    if value is None:
        # Nothing to change; run the body with the environment untouched.
        yield
        return

    old_value = os.environ.get(env_name)
    os.environ[env_name] = value
    try:
        yield
    finally:
        # Restore the variable to its prior state, deleting it when it
        # did not exist before.
        if old_value is None:
            del os.environ[env_name]
        else:
            os.environ[env_name] = old_value
+
+
def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    :param url: the URL being requested.
    :param no_proxy: an explicit comma-separated host list; when ``None``,
        the ``no_proxy``/``NO_PROXY`` environment variable is consulted.
    :rtype: bool
    """

    # Prioritize lowercase environment variables over uppercase
    # to keep a consistent behaviour with other http projects (curl, wget).
    def get_proxy(key):
        return os.environ.get(key) or os.environ.get(key.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = get_proxy("no_proxy")
    parsed = urlparse(url)

    if parsed.hostname is None:
        # URLs don't always have hostnames, e.g. file:/// urls.
        return True

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the hostname, both with and without the port.
        no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host)

        if is_ipv4_address(parsed.hostname):
            # Literal IP hosts match either a CIDR entry or an exact IP entry.
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(parsed.hostname, proxy_ip):
                        return True
                elif parsed.hostname == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            # Hostname entries match as suffixes, with or without the port.
            host_with_port = parsed.hostname
            if parsed.port:
                host_with_port += f":{parsed.port}"

            for host in no_proxy:
                if parsed.hostname.endswith(host) or host_with_port.endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    # Finally defer to proxy_bypass (imported elsewhere in this module,
    # presumably the platform-specific urllib helper — confirm), with any
    # explicitly-passed no_proxy temporarily applied to the environment.
    with set_environ("no_proxy", no_proxy_arg):
        # parsed.hostname can be `None` in cases such as a file URI.
        try:
            bypass = proxy_bypass(parsed.hostname)
        except (TypeError, socket.gaierror):
            bypass = False

    if bypass:
        return True

    return False
+
+
def get_environ_proxies(url, no_proxy=None):
    """Return the environment's proxy settings applicable to *url*.

    Returns an empty dict when the URL should bypass proxies entirely.

    :rtype: dict
    """
    if should_bypass_proxies(url, no_proxy=no_proxy):
        return {}
    return getproxies()
+
+
def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    parts = urlparse(url)
    if parts.hostname is None:
        # No hostname (e.g. file:// URLs): match on scheme alone.
        return proxies.get(parts.scheme, proxies.get("all"))

    # Most-specific key first: scheme+host, scheme, any-scheme+host, any.
    candidate_keys = (
        f"{parts.scheme}://{parts.hostname}",
        parts.scheme,
        f"all://{parts.hostname}",
        "all",
    )
    for key in candidate_keys:
        if key in proxies:
            return proxies[key]
    return None
+
+
def resolve_proxies(request, proxies, trust_env=True):
    """Resolve the effective proxy mapping for a request.

    Combines explicitly-supplied proxies with environment configuration,
    honouring settings such as NO_PROXY to strip proxy configurations.

    :param request: Request or PreparedRequest
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    :param trust_env: Boolean declaring whether to trust environment configs

    :rtype: dict
    """
    new_proxies = proxies.copy() if proxies is not None else {}
    url = request.url
    scheme = urlparse(url).scheme
    no_proxy = new_proxies.get("no_proxy")

    if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
        environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
        env_proxy = environ_proxies.get(scheme, environ_proxies.get("all"))
        if env_proxy:
            # Explicit proxies always win over environment ones.
            new_proxies.setdefault(scheme, env_proxy)
    return new_proxies
+
+
def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :param name: product token placed before the version.
    :rtype: str
    """
    return "{}/{}".format(name, __version__)
+
+
def default_headers():
    """Build the headers sent with every request by default.

    :rtype: requests.structures.CaseInsensitiveDict
    """
    headers = CaseInsensitiveDict()
    headers["User-Agent"] = default_user_agent()
    headers["Accept-Encoding"] = DEFAULT_ACCEPT_ENCODING
    headers["Accept"] = "*/*"
    headers["Connection"] = "keep-alive"
    return headers
+
+
def parse_header_links(value):
    """Return a list of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    """
    links = []
    strip_chars = " '\""

    value = value.strip(strip_chars)
    if not value:
        return links

    for entry in re.split(", *<", value):
        # Split URL from its parameter list; entries without ';' have none.
        url, _, params = entry.partition(";")

        link = {"url": url.strip("<> '\"")}

        for param in params.split(";"):
            try:
                key, param_value = param.split("=")
            except ValueError:
                # Malformed parameter: stop parsing this entry's params.
                break
            link[key.strip(strip_chars)] = param_value.strip(strip_chars)

        links.append(link)

    return links
+
+
# Null bytes; no need to recreate these on each call to guess_json_utf
_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3


def guess_json_utf(data):
    """Guess the UTF flavour of a JSON byte string from its first four bytes.

    :rtype: str
    """
    # JSON always starts with two ASCII characters, so detection is as
    # easy as counting the nulls and from their location and count
    # determine the encoding. Also detect a BOM, if present.
    sample = data[:4]

    # BOM checks first: widest (UTF-32) before its UTF-16 prefix.
    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return "utf-32"  # BOM included
    if sample[:3] == codecs.BOM_UTF8:
        return "utf-8-sig"  # BOM included, MS style (discouraged)
    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return "utf-16"  # BOM included

    nullcount = sample.count(_null)
    if nullcount == 0:
        return "utf-8"
    if nullcount == 2:
        # Two ASCII code points in UTF-16: nulls alternate with data.
        if sample[::2] == _null2:  # 1st and 3rd are null
            return "utf-16-be"
        if sample[1::2] == _null2:  # 2nd and 4th are null
            return "utf-16-le"
        # Did not detect 2 valid UTF-16 ascii-range characters
    elif nullcount == 3:
        # One ASCII code point in UTF-32: three nulls pad one byte.
        if sample[:3] == _null3:
            return "utf-32-be"
        if sample[1:] == _null3:
            return "utf-32-le"
        # Did not detect a valid UTF-32 ascii-range character
    return None
+
+
def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    """
    parsed = parse_url(url)
    scheme, auth, host, port, path, query, fragment = parsed

    # A defect in urlparse determines that there isn't a netloc present in some
    # urls. We previously assumed parsing was overly cautious, and swapped the
    # netloc and path. Due to a lack of tests on the original defect, this is
    # maintained with parse_url for backwards compatibility.
    netloc = parsed.netloc
    if not netloc:
        netloc, path = path, netloc

    if auth:
        # parse_url doesn't provide the netloc with auth
        # so we'll add it ourselves.
        netloc = "@".join([auth, netloc])

    scheme = new_scheme if scheme is None else scheme
    path = "" if path is None else path

    return urlunparse((scheme, netloc, path, "", query, fragment))
+
+
def get_auth_from_url(url):
    """Extract the authentication components of a url as (username, password).

    Returns a pair of empty strings when no credentials are present.

    :rtype: (str,str)
    """
    parsed = urlparse(url)
    try:
        # unquote(None) raises TypeError, which routes to the fallback.
        return (unquote(parsed.username), unquote(parsed.password))
    except (AttributeError, TypeError):
        return ("", "")
+
+
def check_header_validity(header):
    """Verifies that header parts don't contain leading whitespace
    reserved characters, or return characters.

    :param header: tuple, in the format (name, value).
    """
    name, value = header
    for index, part in enumerate((name, value)):
        _validate_header_part(header, part, index)
+
+
def _validate_header_part(header, header_part, header_validator_index):
    """Validate one element of a (name, value) header tuple.

    Raises InvalidHeader when the part has the wrong type or fails the
    str/bytes validator at the given index (0 = name, 1 = value).
    """
    if isinstance(header_part, str):
        validators = _HEADER_VALIDATORS_STR
    elif isinstance(header_part, bytes):
        validators = _HEADER_VALIDATORS_BYTE
    else:
        raise InvalidHeader(
            f"Header part ({header_part!r}) from {header} "
            f"must be of type str or bytes, not {type(header_part)}"
        )

    if not validators[header_validator_index].match(header_part):
        header_kind = "name" if header_validator_index == 0 else "value"
        raise InvalidHeader(
            f"Invalid leading whitespace, reserved character(s), or return "
            f"character(s) in header {header_kind}: {header_part!r}"
        )
+
+
def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    scheme, netloc, path, params, query, fragment = urlparse(url)

    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    # Drop any credentials: keep only what follows the last '@'.
    netloc = netloc.rpartition("@")[2]

    return urlunparse((scheme, netloc, path, params, query, ""))
+
+
def rewind_body(prepared_request):
    """Move file pointer back to its recorded starting position
    so it can be read again on redirect.
    """
    body_seek = getattr(prepared_request.body, "seek", None)
    # Rewinding needs both a seekable body and a recorded integer position.
    if body_seek is None or not isinstance(
        prepared_request._body_position, integer_types
    ):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
    try:
        body_seek(prepared_request._body_position)
    except OSError:
        raise UnrewindableBodyError(
            "An error occurred when rewinding request body for redirect."
        )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/LICENCE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/LICENCE"
new file mode 100644
index 0000000..a8922b1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/LICENCE"
@@ -0,0 +1,49 @@
+`tqdm` is a product of collaborative work.
+Unless otherwise stated, all authors (see commit logs) retain copyright
+for their respective work, and release the work under the MIT licence
+(text below).
+
+Exceptions or notable authors are listed below
+in reverse chronological order:
+
+* files: *
+ MPL-2.0 2015-2024 (c) Casper da Costa-Luis
+ [casperdcl](https://github.com/casperdcl).
+* files: tqdm/_tqdm.py
+ MIT 2016 (c) [PR #96] on behalf of Google Inc.
+* files: tqdm/_tqdm.py README.rst .gitignore
+ MIT 2013 (c) Noam Yorav-Raphael, original author.
+
+[PR #96]: https://github.com/tqdm/tqdm/pull/96
+
+
+Mozilla Public Licence (MPL) v. 2.0 - Exhibit A
+-----------------------------------------------
+
+This Source Code Form is subject to the terms of the
+Mozilla Public License, v. 2.0.
+If a copy of the MPL was not distributed with this project,
+You can obtain one at https://mozilla.org/MPL/2.0/.
+
+
+MIT License (MIT)
+-----------------
+
+Copyright (c) 2013 noamraph
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/METADATA"
new file mode 100644
index 0000000..181b4dc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/METADATA"
@@ -0,0 +1,1594 @@
+Metadata-Version: 2.1
+Name: tqdm
+Version: 4.67.1
+Summary: Fast, Extensible Progress Meter
+Maintainer-email: tqdm developers <devs@tqdm.ml>
+License: MPL-2.0 AND MIT
+Project-URL: homepage, https://tqdm.github.io
+Project-URL: repository, https://github.com/tqdm/tqdm
+Project-URL: changelog, https://tqdm.github.io/releases
+Project-URL: wiki, https://github.com/tqdm/tqdm/wiki
+Keywords: progressbar,progressmeter,progress,bar,meter,rate,eta,console,terminal,time
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Environment :: MacOS X
+Classifier: Environment :: Other Environment
+Classifier: Environment :: Win32 (MS Windows)
+Classifier: Environment :: X11 Applications
+Classifier: Framework :: IPython
+Classifier: Framework :: Jupyter
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: End Users/Desktop
+Classifier: Intended Audience :: Other Audience
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: MIT License
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Operating System :: MacOS
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft
+Classifier: Operating System :: Microsoft :: MS-DOS
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: BSD :: FreeBSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: POSIX :: SunOS/Solaris
+Classifier: Operating System :: Unix
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation
+Classifier: Programming Language :: Python :: Implementation :: IronPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Unix Shell
+Classifier: Topic :: Desktop Environment
+Classifier: Topic :: Education :: Computer Aided Instruction (CAI)
+Classifier: Topic :: Education :: Testing
+Classifier: Topic :: Office/Business
+Classifier: Topic :: Other/Nonlisted Topic
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Pre-processors
+Classifier: Topic :: Software Development :: User Interfaces
+Classifier: Topic :: System :: Installation/Setup
+Classifier: Topic :: System :: Logging
+Classifier: Topic :: System :: Monitoring
+Classifier: Topic :: System :: Shells
+Classifier: Topic :: Terminals
+Classifier: Topic :: Utilities
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENCE
+Requires-Dist: colorama; platform_system == "Windows"
+Provides-Extra: dev
+Requires-Dist: pytest>=6; extra == "dev"
+Requires-Dist: pytest-cov; extra == "dev"
+Requires-Dist: pytest-timeout; extra == "dev"
+Requires-Dist: pytest-asyncio>=0.24; extra == "dev"
+Requires-Dist: nbval; extra == "dev"
+Provides-Extra: discord
+Requires-Dist: requests; extra == "discord"
+Provides-Extra: slack
+Requires-Dist: slack-sdk; extra == "slack"
+Provides-Extra: telegram
+Requires-Dist: requests; extra == "telegram"
+Provides-Extra: notebook
+Requires-Dist: ipywidgets>=6; extra == "notebook"
+
+|Logo|
+
+tqdm
+====
+
+|Py-Versions| |Versions| |Conda-Forge-Status| |Docker| |Snapcraft|
+
+|Build-Status| |Coverage-Status| |Branch-Coverage-Status| |Codacy-Grade| |Libraries-Rank| |PyPI-Downloads|
+
+|LICENCE| |OpenHub-Status| |binder-demo| |awesome-python|
+
+``tqdm`` derives from the Arabic word *taqaddum* (تقدّم) which can mean "progress,"
+and is an abbreviation for "I love you so much" in Spanish (*te quiero demasiado*).
+
+Instantly make your loops show a smart progress meter - just wrap any
+iterable with ``tqdm(iterable)``, and you're done!
+
+.. code:: python
+
+ from tqdm import tqdm
+ for i in tqdm(range(10000)):
+ ...
+
+``76%|████████████████████████ | 7568/10000 [00:33<00:10, 229.00it/s]``
+
+``trange(N)`` can be also used as a convenient shortcut for
+``tqdm(range(N))``.
+
+|Screenshot|
+ |Video| |Slides| |Merch|
+
+It can also be executed as a module with pipes:
+
+.. code:: sh
+
+ $ seq 9999999 | tqdm --bytes | wc -l
+ 75.2MB [00:00, 217MB/s]
+ 9999999
+
+ $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \
+ > backup.tgz
+ 32%|██████████▍ | 8.89G/27.9G [00:42<01:31, 223MB/s]
+
+Overhead is low -- about 60ns per iteration (80ns with ``tqdm.gui``), and is
+unit tested against performance regression.
+By comparison, the well-established
+`ProgressBar <https://github.com/niltonvolpato/python-progressbar>`__ has
+an 800ns/iter overhead.
+
+In addition to its low overhead, ``tqdm`` uses smart algorithms to predict
+the remaining time and to skip unnecessary iteration displays, which allows
+for a negligible overhead in most cases.
+
+``tqdm`` works on any platform
+(Linux, Windows, Mac, FreeBSD, NetBSD, Solaris/SunOS),
+in any console or in a GUI, and is also friendly with IPython/Jupyter notebooks.
+
+``tqdm`` does not require any dependencies (not even ``curses``!), just
+Python and an environment supporting ``carriage return \r`` and
+``line feed \n`` control characters.
+
+------------------------------------------
+
+.. contents:: Table of contents
+ :backlinks: top
+ :local:
+
+
+Installation
+------------
+
+Latest PyPI stable release
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|Versions| |PyPI-Downloads| |Libraries-Dependents|
+
+.. code:: sh
+
+ pip install tqdm
+
+Latest development release on GitHub
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|GitHub-Status| |GitHub-Stars| |GitHub-Commits| |GitHub-Forks| |GitHub-Updated|
+
+Pull and install pre-release ``devel`` branch:
+
+.. code:: sh
+
+ pip install "git+https://github.com/tqdm/tqdm.git@devel#egg=tqdm"
+
+Latest Conda release
+~~~~~~~~~~~~~~~~~~~~
+
+|Conda-Forge-Status|
+
+.. code:: sh
+
+ conda install -c conda-forge tqdm
+
+Latest Snapcraft release
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+|Snapcraft|
+
+There are 3 channels to choose from:
+
+.. code:: sh
+
+ snap install tqdm # implies --stable, i.e. latest tagged release
+ snap install tqdm --candidate # master branch
+ snap install tqdm --edge # devel branch
+
+Note that ``snap`` binaries are purely for CLI use (not ``import``-able), and
+automatically set up ``bash`` tab-completion.
+
+Latest Docker release
+~~~~~~~~~~~~~~~~~~~~~
+
+|Docker|
+
+.. code:: sh
+
+ docker pull tqdm/tqdm
+ docker run -i --rm tqdm/tqdm --help
+
+Other
+~~~~~
+
+There are other (unofficial) places where ``tqdm`` may be downloaded, particularly for CLI use:
+
+|Repology|
+
+.. |Repology| image:: https://repology.org/badge/tiny-repos/python:tqdm.svg
+ :target: https://repology.org/project/python:tqdm/versions
+
+Changelog
+---------
+
+The list of all changes is available either on GitHub's Releases:
+|GitHub-Status|, on the
+`wiki <https://github.com/tqdm/tqdm/wiki/Releases>`__, or on the
+`website <https://tqdm.github.io/releases>`__.
+
+
+Usage
+-----
+
+``tqdm`` is very versatile and can be used in a number of ways.
+The three main ones are given below.
+
+Iterable-based
+~~~~~~~~~~~~~~
+
+Wrap ``tqdm()`` around any iterable:
+
+.. code:: python
+
+ from tqdm import tqdm
+ from time import sleep
+
+ text = ""
+ for char in tqdm(["a", "b", "c", "d"]):
+ sleep(0.25)
+ text = text + char
+
+``trange(i)`` is a special optimised instance of ``tqdm(range(i))``:
+
+.. code:: python
+
+ from tqdm import trange
+
+ for i in trange(100):
+ sleep(0.01)
+
+Instantiation outside of the loop allows for manual control over ``tqdm()``:
+
+.. code:: python
+
+ pbar = tqdm(["a", "b", "c", "d"])
+ for char in pbar:
+ sleep(0.25)
+ pbar.set_description("Processing %s" % char)
+
+Manual
+~~~~~~
+
+Manual control of ``tqdm()`` updates using a ``with`` statement:
+
+.. code:: python
+
+ with tqdm(total=100) as pbar:
+ for i in range(10):
+ sleep(0.1)
+ pbar.update(10)
+
+If the optional variable ``total`` (or an iterable with ``len()``) is
+provided, predictive stats are displayed.
+
+``with`` is also optional (you can just assign ``tqdm()`` to a variable,
+but in this case don't forget to ``del`` or ``close()`` at the end):
+
+.. code:: python
+
+ pbar = tqdm(total=100)
+ for i in range(10):
+ sleep(0.1)
+ pbar.update(10)
+ pbar.close()
+
+Module
+~~~~~~
+
+Perhaps the most wonderful use of ``tqdm`` is in a script or on the command
+line. Simply inserting ``tqdm`` (or ``python -m tqdm``) between pipes will pass
+through all ``stdin`` to ``stdout`` while printing progress to ``stderr``.
+
+The example below demonstrates counting the number of lines in all Python files
+in the current directory, with timing information included.
+
+.. code:: sh
+
+ $ time find . -name '*.py' -type f -exec cat \{} \; | wc -l
+ 857365
+
+ real 0m3.458s
+ user 0m0.274s
+ sys 0m3.325s
+
+ $ time find . -name '*.py' -type f -exec cat \{} \; | tqdm | wc -l
+ 857366it [00:03, 246471.31it/s]
+ 857365
+
+ real 0m3.585s
+ user 0m0.862s
+ sys 0m3.358s
+
+Note that the usual arguments for ``tqdm`` can also be specified.
+
+.. code:: sh
+
+ $ find . -name '*.py' -type f -exec cat \{} \; |
+ tqdm --unit loc --unit_scale --total 857366 >> /dev/null
+ 100%|█████████████████████████████████| 857K/857K [00:04<00:00, 246Kloc/s]
+
+Backing up a large directory?
+
+.. code:: sh
+
+ $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \
+ > backup.tgz
+ 44%|██████████████▊ | 153M/352M [00:14<00:18, 11.0MB/s]
+
+This can be beautified further:
+
+.. code:: sh
+
+ $ BYTES=$(du -sb docs/ | cut -f1)
+ $ tar -cf - docs/ \
+ | tqdm --bytes --total "$BYTES" --desc Processing | gzip \
+ | tqdm --bytes --total "$BYTES" --desc Compressed --position 1 \
+ > ~/backup.tgz
+ Processing: 100%|██████████████████████| 352M/352M [00:14<00:00, 30.2MB/s]
+ Compressed: 42%|█████████▎ | 148M/352M [00:14<00:19, 10.9MB/s]
+
+Or done on a file level using 7-zip:
+
+.. code:: sh
+
+ $ 7z a -bd -r backup.7z docs/ | grep Compressing \
+ | tqdm --total $(find docs/ -type f | wc -l) --unit files \
+ | grep -v Compressing
+ 100%|██████████████████████████▉| 15327/15327 [01:00<00:00, 712.96files/s]
+
+Pre-existing CLI programs already outputting basic progress information will
+benefit from ``tqdm``'s ``--update`` and ``--update_to`` flags:
+
+.. code:: sh
+
+ $ seq 3 0.1 5 | tqdm --total 5 --update_to --null
+ 100%|████████████████████████████████████| 5.0/5 [00:00<00:00, 9673.21it/s]
+ $ seq 10 | tqdm --update --null # 1 + 2 + ... + 10 = 55 iterations
+ 55it [00:00, 90006.52it/s]
+
+FAQ and Known Issues
+--------------------
+
+|GitHub-Issues|
+
+The most common issues relate to excessive output on multiple lines, instead
+of a neat one-line progress bar.
+
+- Consoles in general: require support for carriage return (``CR``, ``\r``).
+
+ * Some cloud logging consoles which don't support ``\r`` properly
+ (`cloudwatch <https://github.com/tqdm/tqdm/issues/966>`__,
+ `K8s <https://github.com/tqdm/tqdm/issues/1319>`__) may benefit from
+ ``export TQDM_POSITION=-1``.
+
+- Nested progress bars:
+
+ * Consoles in general: require support for moving cursors up to the
+ previous line. For example,
+ `IDLE <https://github.com/tqdm/tqdm/issues/191#issuecomment-230168030>`__,
+ `ConEmu <https://github.com/tqdm/tqdm/issues/254>`__ and
+ `PyCharm <https://github.com/tqdm/tqdm/issues/203>`__ (also
+ `here <https://github.com/tqdm/tqdm/issues/208>`__,
+ `here <https://github.com/tqdm/tqdm/issues/307>`__, and
+ `here <https://github.com/tqdm/tqdm/issues/454#issuecomment-335416815>`__)
+ lack full support.
+ * Windows: additionally may require the Python module ``colorama``
+ to ensure nested bars stay within their respective lines.
+
+- Unicode:
+
+ * Environments which report that they support unicode will have solid smooth
+ progressbars. The fallback is an ``ascii``-only bar.
+ * Windows consoles often only partially support unicode and thus
+ `often require explicit ascii=True <https://github.com/tqdm/tqdm/issues/454#issuecomment-335416815>`__
+ (also `here <https://github.com/tqdm/tqdm/issues/499>`__). This is due to
+ either normal-width unicode characters being incorrectly displayed as
+ "wide", or some unicode characters not rendering.
+
+- Wrapping generators:
+
+ * Generator wrapper functions tend to hide the length of iterables.
+ ``tqdm`` does not.
+ * Replace ``tqdm(enumerate(...))`` with ``enumerate(tqdm(...))`` or
+ ``tqdm(enumerate(x), total=len(x), ...)``.
+ The same applies to ``numpy.ndenumerate``.
+ * Replace ``tqdm(zip(a, b))`` with ``zip(tqdm(a), b)`` or even
+ ``zip(tqdm(a), tqdm(b))``.
+ * The same applies to ``itertools``.
+ * Some useful convenience functions can be found under ``tqdm.contrib``.
+
+- `No intermediate output in docker-compose <https://github.com/tqdm/tqdm/issues/771>`__:
+ use ``docker-compose run`` instead of ``docker-compose up`` and ``tty: true``.
+
+- Overriding defaults via environment variables:
+ e.g. in CI/cloud jobs, ``export TQDM_MININTERVAL=5`` to avoid log spam.
+ This override logic is handled by the ``tqdm.utils.envwrap`` decorator
+ (useful independent of ``tqdm``).
+
+If you come across any other difficulties, browse and file |GitHub-Issues|.
+
+Documentation
+-------------
+
+|Py-Versions| |README-Hits| (Since 19 May 2016)
+
+.. code:: python
+
+ class tqdm():
+ """
+ Decorate an iterable object, returning an iterator which acts exactly
+ like the original iterable, but prints a dynamically updating
+ progressbar every time a value is requested.
+ """
+
+ @envwrap("TQDM_") # override defaults via env vars
+ def __init__(self, iterable=None, desc=None, total=None, leave=True,
+ file=None, ncols=None, mininterval=0.1,
+ maxinterval=10.0, miniters=None, ascii=None, disable=False,
+ unit='it', unit_scale=False, dynamic_ncols=False,
+ smoothing=0.3, bar_format=None, initial=0, position=None,
+ postfix=None, unit_divisor=1000, write_bytes=False,
+ lock_args=None, nrows=None, colour=None, delay=0):
+
+Parameters
+~~~~~~~~~~
+
+* iterable : iterable, optional
+ Iterable to decorate with a progressbar.
+ Leave blank to manually manage the updates.
+* desc : str, optional
+ Prefix for the progressbar.
+* total : int or float, optional
+ The number of expected iterations. If unspecified,
+ len(iterable) is used if possible. If float("inf") or as a last
+ resort, only basic progress statistics are displayed
+ (no ETA, no progressbar).
+ If ``gui`` is True and this parameter needs subsequent updating,
+ specify an initial arbitrary large positive number,
+ e.g. 9e9.
+* leave : bool, optional
+ If [default: True], keeps all traces of the progressbar
+ upon termination of iteration.
+ If ``None``, will leave only if ``position`` is ``0``.
+* file : ``io.TextIOWrapper`` or ``io.StringIO``, optional
+ Specifies where to output the progress messages
+ (default: sys.stderr). Uses ``file.write(str)`` and ``file.flush()``
+ methods. For encoding, see ``write_bytes``.
+* ncols : int, optional
+ The width of the entire output message. If specified,
+ dynamically resizes the progressbar to stay within this bound.
+ If unspecified, attempts to use environment width. The
+ fallback is a meter width of 10 and no limit for the counter and
+ statistics. If 0, will not print any meter (only stats).
+* mininterval : float, optional
+ Minimum progress display update interval [default: 0.1] seconds.
+* maxinterval : float, optional
+ Maximum progress display update interval [default: 10] seconds.
+ Automatically adjusts ``miniters`` to correspond to ``mininterval``
+ after long display update lag. Only works if ``dynamic_miniters``
+ or monitor thread is enabled.
+* miniters : int or float, optional
+ Minimum progress display update interval, in iterations.
+ If 0 and ``dynamic_miniters``, will automatically adjust to equal
+ ``mininterval`` (more CPU efficient, good for tight loops).
+ If > 0, will skip display of specified number of iterations.
+ Tweak this and ``mininterval`` to get very efficient loops.
+ If your progress is erratic with both fast and slow iterations
+ (network, skipping items, etc) you should set miniters=1.
+* ascii : bool or str, optional
+ If unspecified or False, use unicode (smooth blocks) to fill
+ the meter. The fallback is to use ASCII characters " 123456789#".
+* disable : bool, optional
+ Whether to disable the entire progressbar wrapper
+ [default: False]. If set to None, disable on non-TTY.
+* unit : str, optional
+ String that will be used to define the unit of each iteration
+ [default: it].
+* unit_scale : bool or int or float, optional
+ If 1 or True, the number of iterations will be reduced/scaled
+ automatically and a metric prefix following the
+ International System of Units standard will be added
+ (kilo, mega, etc.) [default: False]. If any other non-zero
+ number, will scale ``total`` and ``n``.
+* dynamic_ncols : bool, optional
+ If set, constantly alters ``ncols`` and ``nrows`` to the
+ environment (allowing for window resizes) [default: False].
+* smoothing : float, optional
+ Exponential moving average smoothing factor for speed estimates
+ (ignored in GUI mode). Ranges from 0 (average speed) to 1
+ (current/instantaneous speed) [default: 0.3].
+* bar_format : str, optional
+ Specify a custom bar string formatting. May impact performance.
+ [default: '{l_bar}{bar}{r_bar}'], where
+ l_bar='{desc}: {percentage:3.0f}%|' and
+ r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, '
+ '{rate_fmt}{postfix}]'
+ Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt,
+ percentage, elapsed, elapsed_s, ncols, nrows, desc, unit,
+ rate, rate_fmt, rate_noinv, rate_noinv_fmt,
+ rate_inv, rate_inv_fmt, postfix, unit_divisor,
+ remaining, remaining_s, eta.
+ Note that a trailing ": " is automatically removed after {desc}
+ if the latter is empty.
+* initial : int or float, optional
+ The initial counter value. Useful when restarting a progress
+ bar [default: 0]. If using float, consider specifying ``{n:.3f}``
+ or similar in ``bar_format``, or specifying ``unit_scale``.
+* position : int, optional
+ Specify the line offset to print this bar (starting from 0)
+ Automatic if unspecified.
+ Useful to manage multiple bars at once (eg, from threads).
+* postfix : dict or ``*``, optional
+ Specify additional stats to display at the end of the bar.
+ Calls ``set_postfix(**postfix)`` if possible (dict).
+* unit_divisor : float, optional
+ [default: 1000], ignored unless ``unit_scale`` is True.
+* write_bytes : bool, optional
+ Whether to write bytes. If (default: False) will write unicode.
+* lock_args : tuple, optional
+ Passed to ``refresh`` for intermediate output
+ (initialisation, iterating, and updating).
+* nrows : int, optional
+ The screen height. If specified, hides nested bars outside this
+ bound. If unspecified, attempts to use environment height.
+ The fallback is 20.
+* colour : str, optional
+ Bar colour (e.g. 'green', '#00ff00').
+* delay : float, optional
+ Don't display until [default: 0] seconds have elapsed.
+
+Extra CLI Options
+~~~~~~~~~~~~~~~~~
+
+* delim : chr, optional
+ Delimiting character [default: '\n']. Use '\0' for null.
+ N.B.: on Windows systems, Python converts '\n' to '\r\n'.
+* buf_size : int, optional
+ String buffer size in bytes [default: 256]
+ used when ``delim`` is specified.
+* bytes : bool, optional
+ If true, will count bytes, ignore ``delim``, and default
+ ``unit_scale`` to True, ``unit_divisor`` to 1024, and ``unit`` to 'B'.
+* tee : bool, optional
+ If true, passes ``stdin`` to both ``stderr`` and ``stdout``.
+* update : bool, optional
+ If true, will treat input as newly elapsed iterations,
+ i.e. numbers to pass to ``update()``. Note that this is slow
+ (~2e5 it/s) since every input must be decoded as a number.
+* update_to : bool, optional
+ If true, will treat input as total elapsed iterations,
+ i.e. numbers to assign to ``self.n``. Note that this is slow
+ (~2e5 it/s) since every input must be decoded as a number.
+* null : bool, optional
+ If true, will discard input (no stdout).
+* manpath : str, optional
+ Directory in which to install tqdm man pages.
+* comppath : str, optional
+ Directory in which to place tqdm completion.
+* log : str, optional
+ CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET.
+
+Returns
+~~~~~~~
+
+* out : decorated iterator.
+
+.. code:: python
+
+ class tqdm():
+ def update(self, n=1):
+ """
+ Manually update the progress bar, useful for streams
+ such as reading files.
+ E.g.:
+ >>> t = tqdm(total=filesize) # Initialise
+ >>> for current_buffer in stream:
+ ... ...
+ ... t.update(len(current_buffer))
+ >>> t.close()
+ The last line is highly recommended, but possibly not necessary if
+ ``t.update()`` will be called in such a way that ``filesize`` will be
+ exactly reached and printed.
+
+ Parameters
+ ----------
+ n : int or float, optional
+ Increment to add to the internal counter of iterations
+ [default: 1]. If using float, consider specifying ``{n:.3f}``
+ or similar in ``bar_format``, or specifying ``unit_scale``.
+
+ Returns
+ -------
+ out : bool or None
+ True if a ``display()`` was triggered.
+ """
+
+ def close(self):
+ """Cleanup and (if leave=False) close the progressbar."""
+
+ def clear(self, nomove=False):
+ """Clear current bar display."""
+
+ def refresh(self):
+ """
+ Force refresh the display of this bar.
+
+ Parameters
+ ----------
+ nolock : bool, optional
+ If ``True``, does not lock.
+ If [default: ``False``]: calls ``acquire()`` on internal lock.
+ lock_args : tuple, optional
+ Passed to internal lock's ``acquire()``.
+ If specified, will only ``display()`` if ``acquire()`` returns ``True``.
+ """
+
+ def unpause(self):
+ """Restart tqdm timer from last print time."""
+
+ def reset(self, total=None):
+ """
+ Resets to 0 iterations for repeated use.
+
+ Consider combining with ``leave=True``.
+
+ Parameters
+ ----------
+ total : int or float, optional. Total to use for the new bar.
+ """
+
+ def set_description(self, desc=None, refresh=True):
+ """
+ Set/modify description of the progress bar.
+
+ Parameters
+ ----------
+ desc : str, optional
+ refresh : bool, optional
+ Forces refresh [default: True].
+ """
+
+ def set_postfix(self, ordered_dict=None, refresh=True, **tqdm_kwargs):
+ """
+ Set/modify postfix (additional stats)
+ with automatic formatting based on datatype.
+
+ Parameters
+ ----------
+ ordered_dict : dict or OrderedDict, optional
+ refresh : bool, optional
+ Forces refresh [default: True].
+ kwargs : dict, optional
+ """
+
+ @classmethod
+ def write(cls, s, file=sys.stdout, end="\n"):
+ """Print a message via tqdm (without overlap with bars)."""
+
+ @property
+ def format_dict(self):
+ """Public API for read-only member access."""
+
+ def display(self, msg=None, pos=None):
+ """
+ Use ``self.sp`` to display ``msg`` in the specified ``pos``.
+
+ Consider overloading this function when inheriting to use e.g.:
+ ``self.some_frontend(**self.format_dict)`` instead of ``self.sp``.
+
+ Parameters
+ ----------
+ msg : str, optional. What to display (default: ``repr(self)``).
+ pos : int, optional. Position to ``moveto``
+ (default: ``abs(self.pos)``).
+ """
+
+ @classmethod
+ @contextmanager
+ def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs):
+ """
+ stream : file-like object.
+ method : str, "read" or "write". The result of ``read()`` and
+ the first argument of ``write()`` should have a ``len()``.
+
+ >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj:
+ ... while True:
+ ... chunk = fobj.read(chunk_size)
+ ... if not chunk:
+ ... break
+ """
+
+ @classmethod
+ def pandas(cls, *targs, **tqdm_kwargs):
+ """Registers the current `tqdm` class with `pandas`."""
+
+ def trange(*args, **tqdm_kwargs):
+ """Shortcut for `tqdm(range(*args), **tqdm_kwargs)`."""
+
+Convenience Functions
+~~~~~~~~~~~~~~~~~~~~~
+
+.. code:: python
+
+ def tqdm.contrib.tenumerate(iterable, start=0, total=None,
+ tqdm_class=tqdm.auto.tqdm, **tqdm_kwargs):
+ """Equivalent of `numpy.ndenumerate` or builtin `enumerate`."""
+
+ def tqdm.contrib.tzip(iter1, *iter2plus, **tqdm_kwargs):
+ """Equivalent of builtin `zip`."""
+
+ def tqdm.contrib.tmap(function, *sequences, **tqdm_kwargs):
+ """Equivalent of builtin `map`."""
+
+Submodules
+~~~~~~~~~~
+
+.. code:: python
+
+ class tqdm.notebook.tqdm(tqdm.tqdm):
+ """IPython/Jupyter Notebook widget."""
+
+ class tqdm.auto.tqdm(tqdm.tqdm):
+        """Automatically chooses between `tqdm.notebook` and `tqdm.tqdm`."""
+
+ class tqdm.asyncio.tqdm(tqdm.tqdm):
+ """Asynchronous version."""
+ @classmethod
+ def as_completed(cls, fs, *, loop=None, timeout=None, total=None,
+ **tqdm_kwargs):
+ """Wrapper for `asyncio.as_completed`."""
+
+ class tqdm.gui.tqdm(tqdm.tqdm):
+ """Matplotlib GUI version."""
+
+ class tqdm.tk.tqdm(tqdm.tqdm):
+ """Tkinter GUI version."""
+
+ class tqdm.rich.tqdm(tqdm.tqdm):
+ """`rich.progress` version."""
+
+ class tqdm.keras.TqdmCallback(keras.callbacks.Callback):
+ """Keras callback for epoch and batch progress."""
+
+ class tqdm.dask.TqdmCallback(dask.callbacks.Callback):
+ """Dask callback for task progress."""
+
+
+``contrib``
++++++++++++
+
+The ``tqdm.contrib`` package also contains experimental modules:
+
+- ``tqdm.contrib.itertools``: Thin wrappers around ``itertools``
+- ``tqdm.contrib.concurrent``: Thin wrappers around ``concurrent.futures``
+- ``tqdm.contrib.slack``: Posts to `Slack <https://slack.com>`__ bots
+- ``tqdm.contrib.discord``: Posts to `Discord <https://discord.com>`__ bots
+- ``tqdm.contrib.telegram``: Posts to `Telegram <https://telegram.org>`__ bots
+- ``tqdm.contrib.bells``: Automagically enables all optional features
+
+ * ``auto``, ``pandas``, ``slack``, ``discord``, ``telegram``
+
+Examples and Advanced Usage
+---------------------------
+
+- See the `examples <https://github.com/tqdm/tqdm/tree/master/examples>`__
+ folder;
+- import the module and run ``help()``;
+- consult the `wiki <https://github.com/tqdm/tqdm/wiki>`__;
+
+ * this has an
+ `excellent article <https://github.com/tqdm/tqdm/wiki/How-to-make-a-great-Progress-Bar>`__
+ on how to make a **great** progressbar;
+
+- check out the `slides from PyData London <https://tqdm.github.io/PyData2019/slides.html>`__, or
+- run the |binder-demo|.
+
+Description and additional stats
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Custom information can be displayed and updated dynamically on ``tqdm`` bars
+with the ``desc`` and ``postfix`` arguments:
+
+.. code:: python
+
+ from tqdm import tqdm, trange
+ from random import random, randint
+ from time import sleep
+
+ with trange(10) as t:
+ for i in t:
+ # Description will be displayed on the left
+ t.set_description('GEN %i' % i)
+ # Postfix will be displayed on the right,
+ # formatted automatically based on argument's datatype
+ t.set_postfix(loss=random(), gen=randint(1,999), str='h',
+ lst=[1, 2])
+ sleep(0.1)
+
+ with tqdm(total=10, bar_format="{postfix[0]} {postfix[1][value]:>8.2g}",
+ postfix=["Batch", {"value": 0}]) as t:
+ for i in range(10):
+ sleep(0.1)
+ t.postfix[1]["value"] = i / 2
+ t.update()
+
+Points to remember when using ``{postfix[...]}`` in the ``bar_format`` string:
+
+- ``postfix`` also needs to be passed as an initial argument in a compatible
+ format, and
+- ``postfix`` will be auto-converted to a string if it is a ``dict``-like
+ object. To prevent this behaviour, insert an extra item into the dictionary
+ where the key is not a string.
+
+Additional ``bar_format`` parameters may also be defined by overriding
+``format_dict``, and the bar itself may be modified using ``ascii``:
+
+.. code:: python
+
+ from tqdm import tqdm
+ class TqdmExtraFormat(tqdm):
+ """Provides a `total_time` format parameter"""
+ @property
+ def format_dict(self):
+ d = super().format_dict
+ total_time = d["elapsed"] * (d["total"] or 0) / max(d["n"], 1)
+ d.update(total_time=self.format_interval(total_time) + " in total")
+ return d
+
+ for i in TqdmExtraFormat(
+ range(9), ascii=" .oO0",
+ bar_format="{total_time}: {percentage:.0f}%|{bar}{r_bar}"):
+ if i == 4:
+ break
+
+.. code::
+
+ 00:00 in total: 44%|0000. | 4/9 [00:00<00:00, 962.93it/s]
+
+Note that ``{bar}`` also supports a format specifier ``[width][type]``.
+
+- ``width``
+
+ * unspecified (default): automatic to fill ``ncols``
+ * ``int >= 0``: fixed width overriding ``ncols`` logic
+ * ``int < 0``: subtract from the automatic default
+
+- ``type``
+
+ * ``a``: ascii (``ascii=True`` override)
+ * ``u``: unicode (``ascii=False`` override)
+ * ``b``: blank (``ascii=" "`` override)
+
+This means a fixed bar with right-justified text may be created by using:
+``bar_format="{l_bar}{bar:10}|{bar:-10b}right-justified"``
+
+Nested progress bars
+~~~~~~~~~~~~~~~~~~~~
+
+``tqdm`` supports nested progress bars. Here's an example:
+
+.. code:: python
+
+ from tqdm.auto import trange
+ from time import sleep
+
+ for i in trange(4, desc='1st loop'):
+ for j in trange(5, desc='2nd loop'):
+ for k in trange(50, desc='3rd loop', leave=False):
+ sleep(0.01)
+
+For manual control over positioning (e.g. for multi-processing use),
+you may specify ``position=n`` where ``n=0`` for the outermost bar,
+``n=1`` for the next, and so on.
+However, it's best to check if ``tqdm`` can work without manual ``position``
+first.
+
+.. code:: python
+
+ from time import sleep
+ from tqdm import trange, tqdm
+ from multiprocessing import Pool, RLock, freeze_support
+
+ L = list(range(9))
+
+ def progresser(n):
+ interval = 0.001 / (n + 2)
+ total = 5000
+ text = f"#{n}, est. {interval * total:<04.2}s"
+ for _ in trange(total, desc=text, position=n):
+ sleep(interval)
+
+ if __name__ == '__main__':
+ freeze_support() # for Windows support
+ tqdm.set_lock(RLock()) # for managing output contention
+ p = Pool(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),))
+ p.map(progresser, L)
+
+Note that in Python 3, ``tqdm.write`` is thread-safe:
+
+.. code:: python
+
+ from time import sleep
+ from tqdm import tqdm, trange
+ from concurrent.futures import ThreadPoolExecutor
+
+ L = list(range(9))
+
+ def progresser(n):
+ interval = 0.001 / (n + 2)
+ total = 5000
+ text = f"#{n}, est. {interval * total:<04.2}s"
+ for _ in trange(total, desc=text):
+ sleep(interval)
+ if n == 6:
+ tqdm.write("n == 6 completed.")
+ tqdm.write("`tqdm.write()` is thread-safe in py3!")
+
+ if __name__ == '__main__':
+ with ThreadPoolExecutor() as p:
+ p.map(progresser, L)
+
+Hooks and callbacks
+~~~~~~~~~~~~~~~~~~~
+
+``tqdm`` can easily support callbacks/hooks and manual updates.
+Here's an example with ``urllib``:
+
+**``urllib.urlretrieve`` documentation**
+
+ | [...]
+ | If present, the hook function will be called once
+ | on establishment of the network connection and once after each block read
+ | thereafter. The hook will be passed three arguments; a count of blocks
+ | transferred so far, a block size in bytes, and the total size of the file.
+ | [...]
+
+.. code:: python
+
+ import urllib, os
+ from tqdm import tqdm
+ urllib = getattr(urllib, 'request', urllib)
+
+ class TqdmUpTo(tqdm):
+ """Provides `update_to(n)` which uses `tqdm.update(delta_n)`."""
+ def update_to(self, b=1, bsize=1, tsize=None):
+ """
+ b : int, optional
+ Number of blocks transferred so far [default: 1].
+ bsize : int, optional
+ Size of each block (in tqdm units) [default: 1].
+ tsize : int, optional
+ Total size (in tqdm units). If [default: None] remains unchanged.
+ """
+ if tsize is not None:
+ self.total = tsize
+ return self.update(b * bsize - self.n) # also sets self.n = b * bsize
+
+ eg_link = "https://caspersci.uk.to/matryoshka.zip"
+ with TqdmUpTo(unit='B', unit_scale=True, unit_divisor=1024, miniters=1,
+ desc=eg_link.split('/')[-1]) as t: # all optional kwargs
+ urllib.urlretrieve(eg_link, filename=os.devnull,
+ reporthook=t.update_to, data=None)
+ t.total = t.n
+
+Inspired by `twine#242 <https://github.com/pypa/twine/pull/242>`__.
+Functional alternative in
+`examples/tqdm_wget.py <https://github.com/tqdm/tqdm/blob/master/examples/tqdm_wget.py>`__.
+
+It is recommended to use ``miniters=1`` whenever there are potentially
+large differences in iteration speed (e.g. downloading a file over
+a patchy connection).
+
+**Wrapping read/write methods**
+
+To measure throughput through a file-like object's ``read`` or ``write``
+methods, use ``CallbackIOWrapper``:
+
+.. code:: python
+
+ from tqdm.auto import tqdm
+ from tqdm.utils import CallbackIOWrapper
+
+ with tqdm(total=file_obj.size,
+ unit='B', unit_scale=True, unit_divisor=1024) as t:
+ fobj = CallbackIOWrapper(t.update, file_obj, "read")
+ while True:
+ chunk = fobj.read(chunk_size)
+ if not chunk:
+ break
+ t.reset()
+ # ... continue to use `t` for something else
+
+Alternatively, use the even simpler ``wrapattr`` convenience function,
+which would condense both the ``urllib`` and ``CallbackIOWrapper`` examples
+down to:
+
+.. code:: python
+
+ import urllib, os
+ from tqdm import tqdm
+
+ eg_link = "https://caspersci.uk.to/matryoshka.zip"
+ response = getattr(urllib, 'request', urllib).urlopen(eg_link)
+ with tqdm.wrapattr(open(os.devnull, "wb"), "write",
+ miniters=1, desc=eg_link.split('/')[-1],
+ total=getattr(response, 'length', None)) as fout:
+ for chunk in response:
+ fout.write(chunk)
+
+The ``requests`` equivalent is nearly identical:
+
+.. code:: python
+
+ import requests, os
+ from tqdm import tqdm
+
+ eg_link = "https://caspersci.uk.to/matryoshka.zip"
+ response = requests.get(eg_link, stream=True)
+ with tqdm.wrapattr(open(os.devnull, "wb"), "write",
+ miniters=1, desc=eg_link.split('/')[-1],
+ total=int(response.headers.get('content-length', 0))) as fout:
+ for chunk in response.iter_content(chunk_size=4096):
+ fout.write(chunk)
+
+**Custom callback**
+
+``tqdm`` is known for intelligently skipping unnecessary displays. To make a
+custom callback take advantage of this, simply use the return value of
+``update()``. This is set to ``True`` if a ``display()`` was triggered.
+
+.. code:: python
+
+ from tqdm.auto import tqdm as std_tqdm
+
+ def external_callback(*args, **kwargs):
+ ...
+
+ class TqdmExt(std_tqdm):
+ def update(self, n=1):
+ displayed = super().update(n)
+ if displayed:
+ external_callback(**self.format_dict)
+ return displayed
+
+``asyncio``
+~~~~~~~~~~~
+
+Note that ``break`` isn't currently caught by asynchronous iterators.
+This means that ``tqdm`` cannot clean up after itself in this case:
+
+.. code:: python
+
+ from tqdm.asyncio import tqdm
+
+ async for i in tqdm(range(9)):
+ if i == 2:
+ break
+
+Instead, either call ``pbar.close()`` manually or use the context manager syntax:
+
+.. code:: python
+
+ from tqdm.asyncio import tqdm
+
+ with tqdm(range(9)) as pbar:
+ async for i in pbar:
+ if i == 2:
+ break
+
+Pandas Integration
+~~~~~~~~~~~~~~~~~~
+
+Due to popular demand we've added support for ``pandas`` -- here's an example
+for ``DataFrame.progress_apply`` and ``DataFrameGroupBy.progress_apply``:
+
+.. code:: python
+
+ import pandas as pd
+ import numpy as np
+ from tqdm import tqdm
+
+ df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
+
+ # Register `pandas.progress_apply` and `pandas.Series.map_apply` with `tqdm`
+ # (can use `tqdm.gui.tqdm`, `tqdm.notebook.tqdm`, optional kwargs, etc.)
+ tqdm.pandas(desc="my bar!")
+
+ # Now you can use `progress_apply` instead of `apply`
+ # and `progress_map` instead of `map`
+ df.progress_apply(lambda x: x**2)
+ # can also groupby:
+ # df.groupby(0).progress_apply(lambda x: x**2)
+
+In case you're interested in how this works (and how to modify it for your
+own callbacks), see the
+`examples <https://github.com/tqdm/tqdm/tree/master/examples>`__
+folder or import the module and run ``help()``.
+
+Keras Integration
+~~~~~~~~~~~~~~~~~
+
+A ``keras`` callback is also available:
+
+.. code:: python
+
+ from tqdm.keras import TqdmCallback
+
+ ...
+
+ model.fit(..., verbose=0, callbacks=[TqdmCallback()])
+
+Dask Integration
+~~~~~~~~~~~~~~~~
+
+A ``dask`` callback is also available:
+
+.. code:: python
+
+ from tqdm.dask import TqdmCallback
+
+ with TqdmCallback(desc="compute"):
+ ...
+ arr.compute()
+
+ # or use callback globally
+ cb = TqdmCallback(desc="global")
+ cb.register()
+ arr.compute()
+
+IPython/Jupyter Integration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+IPython/Jupyter is supported via the ``tqdm.notebook`` submodule:
+
+.. code:: python
+
+ from tqdm.notebook import trange, tqdm
+ from time import sleep
+
+ for i in trange(3, desc='1st loop'):
+ for j in tqdm(range(100), desc='2nd loop'):
+ sleep(0.01)
+
+In addition to ``tqdm`` features, the submodule provides a native Jupyter
+widget (compatible with IPython v1-v4 and Jupyter), fully working nested bars
+and colour hints (blue: normal, green: completed, red: error/interrupt,
+light blue: no ETA); as demonstrated below.
+
+|Screenshot-Jupyter1|
+|Screenshot-Jupyter2|
+|Screenshot-Jupyter3|
+
+The ``notebook`` version supports percentage or pixels for overall width
+(e.g.: ``ncols='100%'`` or ``ncols='480px'``).
+
+It is also possible to let ``tqdm`` automatically choose between
+console or notebook versions by using the ``autonotebook`` submodule:
+
+.. code:: python
+
+ from tqdm.autonotebook import tqdm
+ tqdm.pandas()
+
+Note that this will issue a ``TqdmExperimentalWarning`` if run in a notebook
+since it is not meant to be possible to distinguish between ``jupyter notebook``
+and ``jupyter console``. Use ``auto`` instead of ``autonotebook`` to suppress
+this warning.
+
+Note that notebooks will display the bar in the cell where it was created.
+This may be a different cell from the one where it is used.
+If this is not desired, either
+
+- delay the creation of the bar to the cell where it must be displayed, or
+- create the bar with ``display=False``, and in a later cell call
+ ``display(bar.container)``:
+
+.. code:: python
+
+ from tqdm.notebook import tqdm
+ pbar = tqdm(..., display=False)
+
+.. code:: python
+
+ # different cell
+ display(pbar.container)
+
+The ``keras`` callback has a ``display()`` method which can be used likewise:
+
+.. code:: python
+
+ from tqdm.keras import TqdmCallback
+ cbk = TqdmCallback(display=False)
+
+.. code:: python
+
+ # different cell
+ cbk.display()
+ model.fit(..., verbose=0, callbacks=[cbk])
+
+Another possibility is to have a single bar (near the top of the notebook)
+which is constantly re-used (using ``reset()`` rather than ``close()``).
+For this reason, the notebook version (unlike the CLI version) does not
+automatically call ``close()`` upon ``Exception``.
+
+.. code:: python
+
+ from tqdm.notebook import tqdm
+ pbar = tqdm()
+
+.. code:: python
+
+ # different cell
+ iterable = range(100)
+ pbar.reset(total=len(iterable)) # initialise with new `total`
+ for i in iterable:
+ pbar.update()
+ pbar.refresh() # force print final status but don't `close()`
+
+Custom Integration
+~~~~~~~~~~~~~~~~~~
+
+To change the default arguments (such as making ``dynamic_ncols=True``),
+simply use built-in Python magic:
+
+.. code:: python
+
+ from functools import partial
+ from tqdm import tqdm as std_tqdm
+ tqdm = partial(std_tqdm, dynamic_ncols=True)
+
+For further customisation,
+``tqdm`` may be inherited from to create custom callbacks (as with the
+``TqdmUpTo`` example `above <#hooks-and-callbacks>`__) or for custom frontends
+(e.g. GUIs such as notebook or plotting packages). In the latter case:
+
+1. ``def __init__()`` to call ``super().__init__(..., gui=True)`` to disable
+ terminal ``status_printer`` creation.
+2. Redefine: ``close()``, ``clear()``, ``display()``.
+
+Consider overloading ``display()`` to use e.g.
+``self.frontend(**self.format_dict)`` instead of ``self.sp(repr(self))``.
+
+Some submodule examples of inheritance:
+
+- `tqdm/notebook.py <https://github.com/tqdm/tqdm/blob/master/tqdm/notebook.py>`__
+- `tqdm/gui.py <https://github.com/tqdm/tqdm/blob/master/tqdm/gui.py>`__
+- `tqdm/tk.py <https://github.com/tqdm/tqdm/blob/master/tqdm/tk.py>`__
+- `tqdm/contrib/slack.py <https://github.com/tqdm/tqdm/blob/master/tqdm/contrib/slack.py>`__
+- `tqdm/contrib/discord.py <https://github.com/tqdm/tqdm/blob/master/tqdm/contrib/discord.py>`__
+- `tqdm/contrib/telegram.py <https://github.com/tqdm/tqdm/blob/master/tqdm/contrib/telegram.py>`__
+
+Dynamic Monitor/Meter
+~~~~~~~~~~~~~~~~~~~~~
+
+You can use a ``tqdm`` as a meter which is not monotonically increasing.
+This could be because ``n`` decreases (e.g. a CPU usage monitor) or ``total``
+changes.
+
+One example would be recursively searching for files. The ``total`` is the
+number of objects found so far, while ``n`` is the number of those objects which
+are files (rather than folders):
+
+.. code:: python
+
+ from tqdm import tqdm
+ import os.path
+
+ def find_files_recursively(path, show_progress=True):
+ files = []
+ # total=1 assumes `path` is a file
+ t = tqdm(total=1, unit="file", disable=not show_progress)
+ if not os.path.exists(path):
+ raise IOError("Cannot find:" + path)
+
+ def append_found_file(f):
+ files.append(f)
+ t.update()
+
+ def list_found_dir(path):
+ """returns os.listdir(path) assuming os.path.isdir(path)"""
+ listing = os.listdir(path)
+ # subtract 1 since a "file" we found was actually this directory
+ t.total += len(listing) - 1
+ # fancy way to give info without forcing a refresh
+ t.set_postfix(dir=path[-10:], refresh=False)
+ t.update(0) # may trigger a refresh
+ return listing
+
+ def recursively_search(path):
+ if os.path.isdir(path):
+ for f in list_found_dir(path):
+ recursively_search(os.path.join(path, f))
+ else:
+ append_found_file(path)
+
+ recursively_search(path)
+ t.set_postfix(dir=path)
+ t.close()
+ return files
+
+Using ``update(0)`` is a handy way to let ``tqdm`` decide when to trigger a
+display refresh to avoid console spamming.
+
+Writing messages
+~~~~~~~~~~~~~~~~
+
+This is a work in progress (see
+`#737 <https://github.com/tqdm/tqdm/issues/737>`__).
+
+Since ``tqdm`` uses a simple printing mechanism to display progress bars,
+you should not write any message in the terminal using ``print()`` while
+a progressbar is open.
+
+To write messages in the terminal without any collision with ``tqdm`` bar
+display, a ``.write()`` method is provided:
+
+.. code:: python
+
+ from tqdm.auto import tqdm, trange
+ from time import sleep
+
+ bar = trange(10)
+ for i in bar:
+ # Print using tqdm class method .write()
+ sleep(0.1)
+ if not (i % 3):
+ tqdm.write("Done task %i" % i)
+ # Can also use bar.write()
+
+By default, this will print to standard output ``sys.stdout``, but you can
+specify any file-like object using the ``file`` argument. For example, this
+can be used to redirect the messages writing to a log file or class.
+
+Redirecting writing
+~~~~~~~~~~~~~~~~~~~
+
+If using a library that can print messages to the console, editing the library
+by replacing ``print()`` with ``tqdm.write()`` may not be desirable.
+In that case, redirecting ``sys.stdout`` to ``tqdm.write()`` is an option.
+
+To redirect ``sys.stdout``, create a file-like class that will write
+any input string to ``tqdm.write()``, and supply the arguments
+``file=sys.stdout, dynamic_ncols=True``.
+
+A reusable canonical example is given below:
+
+.. code:: python
+
+ from time import sleep
+ import contextlib
+ import sys
+ from tqdm import tqdm
+ from tqdm.contrib import DummyTqdmFile
+
+
+ @contextlib.contextmanager
+ def std_out_err_redirect_tqdm():
+ orig_out_err = sys.stdout, sys.stderr
+ try:
+ sys.stdout, sys.stderr = map(DummyTqdmFile, orig_out_err)
+ yield orig_out_err[0]
+ # Relay exceptions
+ except Exception as exc:
+ raise exc
+ # Always restore sys.stdout/err if necessary
+ finally:
+ sys.stdout, sys.stderr = orig_out_err
+
+ def some_fun(i):
+ print("Fee, fi, fo,".split()[i])
+
+ # Redirect stdout to tqdm.write() (don't forget the `as save_stdout`)
+ with std_out_err_redirect_tqdm() as orig_stdout:
+ # tqdm needs the original stdout
+ # and dynamic_ncols=True to autodetect console width
+ for i in tqdm(range(3), file=orig_stdout, dynamic_ncols=True):
+ sleep(.5)
+ some_fun(i)
+
+ # After the `with`, printing is restored
+ print("Done!")
+
+Redirecting ``logging``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Similar to ``sys.stdout``/``sys.stderr`` as detailed above, console ``logging``
+may also be redirected to ``tqdm.write()``.
+
+Warning: if also redirecting ``sys.stdout``/``sys.stderr``, make sure to
+redirect ``logging`` first if needed.
+
+Helper methods are available in ``tqdm.contrib.logging``. For example:
+
+.. code:: python
+
+ import logging
+ from tqdm import trange
+ from tqdm.contrib.logging import logging_redirect_tqdm
+
+ LOG = logging.getLogger(__name__)
+
+ if __name__ == '__main__':
+ logging.basicConfig(level=logging.INFO)
+ with logging_redirect_tqdm():
+ for i in trange(9):
+ if i == 4:
+ LOG.info("console logging redirected to `tqdm.write()`")
+ # logging restored
+
+Monitoring thread, intervals and miniters
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``tqdm`` implements a few tricks to increase efficiency and reduce overhead.
+
+- Avoid unnecessary frequent bar refreshing: ``mininterval`` defines how long
+ to wait between each refresh. ``tqdm`` always gets updated in the background,
+ but it will display only every ``mininterval``.
+- Reduce number of calls to check system clock/time.
+- ``mininterval`` is more intuitive to configure than ``miniters``.
+ A clever adjustment system ``dynamic_miniters`` will automatically adjust
+ ``miniters`` to the amount of iterations that fit into time ``mininterval``.
+  Essentially, ``tqdm`` will check if it's time to print without actually
+  checking time. This behaviour can still be bypassed by manually setting
+  ``miniters``.
+
+However, consider a case with a combination of fast and slow iterations.
+After a few fast iterations, ``dynamic_miniters`` will set ``miniters`` to a
+large number. When iteration rate subsequently slows, ``miniters`` will
+remain large and thus reduce display update frequency. To address this:
+
+- ``maxinterval`` defines the maximum time between display refreshes.
+ A concurrent monitoring thread checks for overdue updates and forces one
+ where necessary.
+
+The monitoring thread should not have a noticeable overhead, and guarantees
+updates at least every 10 seconds by default.
+This value can be directly changed by setting the ``monitor_interval`` of
+any ``tqdm`` instance (i.e. ``t = tqdm.tqdm(...); t.monitor_interval = 2``).
+The monitor thread may be disabled application-wide by setting
+``tqdm.tqdm.monitor_interval = 0`` before instantiation of any ``tqdm`` bar.
+
+
+Merch
+-----
+
+You can buy `tqdm branded merch <https://tqdm.github.io/merch>`__ now!
+
+Contributions
+-------------
+
+|GitHub-Commits| |GitHub-Issues| |GitHub-PRs| |OpenHub-Status| |GitHub-Contributions| |CII Best Practices|
+
+All source code is hosted on `GitHub <https://github.com/tqdm/tqdm>`__.
+Contributions are welcome.
+
+See the
+`CONTRIBUTING <https://github.com/tqdm/tqdm/blob/master/CONTRIBUTING.md>`__
+file for more information.
+
+Developers who have made significant contributions, ranked by *SLoC*
+(surviving lines of code,
+`git fame <https://github.com/casperdcl/git-fame>`__ ``-wMC --excl '\.(png|gif|jpg)$'``),
+are:
+
+==================== ======================================================== ==== ================================
+Name ID SLoC Notes
+==================== ======================================================== ==== ================================
+Casper da Costa-Luis `casperdcl <https://github.com/casperdcl>`__ ~80% primary maintainer |Gift-Casper|
+Stephen Larroque `lrq3000 <https://github.com/lrq3000>`__ ~9% team member
+Martin Zugnoni `martinzugnoni <https://github.com/martinzugnoni>`__ ~3%
+Daniel Ecer `de-code <https://github.com/de-code>`__ ~2%
+Richard Sheridan `richardsheridan <https://github.com/richardsheridan>`__ ~1%
+Guangshuo Chen `chengs <https://github.com/chengs>`__ ~1%
+Helio Machado `0x2b3bfa0 <https://github.com/0x2b3bfa0>`__ ~1%
+Kyle Altendorf `altendky <https://github.com/altendky>`__ <1%
+Noam Yorav-Raphael `noamraph <https://github.com/noamraph>`__ <1% original author
+Matthew Stevens `mjstevens777 <https://github.com/mjstevens777>`__ <1%
+Hadrien Mary `hadim <https://github.com/hadim>`__ <1% team member
+Mikhail Korobov `kmike <https://github.com/kmike>`__ <1% team member
+==================== ======================================================== ==== ================================
+
+Ports to Other Languages
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+A list is available on
+`this wiki page <https://github.com/tqdm/tqdm/wiki/tqdm-ports>`__.
+
+
+LICENCE
+-------
+
+Open Source (OSI approved): |LICENCE|
+
+Citation information: |DOI|
+
+|README-Hits| (Since 19 May 2016)
+
+.. |Logo| image:: https://tqdm.github.io/img/logo.gif
+.. |Screenshot| image:: https://tqdm.github.io/img/tqdm.gif
+.. |Video| image:: https://tqdm.github.io/img/video.jpg
+ :target: https://tqdm.github.io/video
+.. |Slides| image:: https://tqdm.github.io/img/slides.jpg
+ :target: https://tqdm.github.io/PyData2019/slides.html
+.. |Merch| image:: https://tqdm.github.io/img/merch.jpg
+ :target: https://tqdm.github.io/merch
+.. |Build-Status| image:: https://img.shields.io/github/actions/workflow/status/tqdm/tqdm/test.yml?branch=master&label=tqdm&logo=GitHub
+ :target: https://github.com/tqdm/tqdm/actions/workflows/test.yml
+.. |Coverage-Status| image:: https://img.shields.io/coveralls/github/tqdm/tqdm/master?logo=coveralls
+ :target: https://coveralls.io/github/tqdm/tqdm
+.. |Branch-Coverage-Status| image:: https://codecov.io/gh/tqdm/tqdm/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/tqdm/tqdm
+.. |Codacy-Grade| image:: https://app.codacy.com/project/badge/Grade/3f965571598f44549c7818f29cdcf177
+ :target: https://www.codacy.com/gh/tqdm/tqdm/dashboard
+.. |CII Best Practices| image:: https://bestpractices.coreinfrastructure.org/projects/3264/badge
+ :target: https://bestpractices.coreinfrastructure.org/projects/3264
+.. |GitHub-Status| image:: https://img.shields.io/github/tag/tqdm/tqdm.svg?maxAge=86400&logo=github&logoColor=white
+ :target: https://github.com/tqdm/tqdm/releases
+.. |GitHub-Forks| image:: https://img.shields.io/github/forks/tqdm/tqdm.svg?logo=github&logoColor=white
+ :target: https://github.com/tqdm/tqdm/network
+.. |GitHub-Stars| image:: https://img.shields.io/github/stars/tqdm/tqdm.svg?logo=github&logoColor=white
+ :target: https://github.com/tqdm/tqdm/stargazers
+.. |GitHub-Commits| image:: https://img.shields.io/github/commit-activity/y/tqdm/tqdm.svg?logo=git&logoColor=white
+ :target: https://github.com/tqdm/tqdm/graphs/commit-activity
+.. |GitHub-Issues| image:: https://img.shields.io/github/issues-closed/tqdm/tqdm.svg?logo=github&logoColor=white
+ :target: https://github.com/tqdm/tqdm/issues?q=
+.. |GitHub-PRs| image:: https://img.shields.io/github/issues-pr-closed/tqdm/tqdm.svg?logo=github&logoColor=white
+ :target: https://github.com/tqdm/tqdm/pulls
+.. |GitHub-Contributions| image:: https://img.shields.io/github/contributors/tqdm/tqdm.svg?logo=github&logoColor=white
+ :target: https://github.com/tqdm/tqdm/graphs/contributors
+.. |GitHub-Updated| image:: https://img.shields.io/github/last-commit/tqdm/tqdm/master.svg?logo=github&logoColor=white&label=pushed
+ :target: https://github.com/tqdm/tqdm/pulse
+.. |Gift-Casper| image:: https://img.shields.io/badge/dynamic/json.svg?color=ff69b4&label=gifts%20received&prefix=%C2%A3&query=%24..sum&url=https%3A%2F%2Fcaspersci.uk.to%2Fgifts.json
+ :target: https://cdcl.ml/sponsor
+.. |Versions| image:: https://img.shields.io/pypi/v/tqdm.svg
+ :target: https://tqdm.github.io/releases
+.. |PyPI-Downloads| image:: https://img.shields.io/pypi/dm/tqdm.svg?label=pypi%20downloads&logo=PyPI&logoColor=white
+ :target: https://pepy.tech/project/tqdm
+.. |Py-Versions| image:: https://img.shields.io/pypi/pyversions/tqdm.svg?logo=python&logoColor=white
+ :target: https://pypi.org/project/tqdm
+.. |Conda-Forge-Status| image:: https://img.shields.io/conda/v/conda-forge/tqdm.svg?label=conda-forge&logo=conda-forge
+ :target: https://anaconda.org/conda-forge/tqdm
+.. |Snapcraft| image:: https://img.shields.io/badge/snap-install-82BEA0.svg?logo=snapcraft
+ :target: https://snapcraft.io/tqdm
+.. |Docker| image:: https://img.shields.io/badge/docker-pull-blue.svg?logo=docker&logoColor=white
+ :target: https://hub.docker.com/r/tqdm/tqdm
+.. |Libraries-Rank| image:: https://img.shields.io/librariesio/sourcerank/pypi/tqdm.svg?logo=koding&logoColor=white
+ :target: https://libraries.io/pypi/tqdm
+.. |Libraries-Dependents| image:: https://img.shields.io/librariesio/dependent-repos/pypi/tqdm.svg?logo=koding&logoColor=white
+ :target: https://github.com/tqdm/tqdm/network/dependents
+.. |OpenHub-Status| image:: https://www.openhub.net/p/tqdm/widgets/project_thin_badge?format=gif
+ :target: https://www.openhub.net/p/tqdm?ref=Thin+badge
+.. |awesome-python| image:: https://awesome.re/mentioned-badge.svg
+ :target: https://github.com/vinta/awesome-python
+.. |LICENCE| image:: https://img.shields.io/pypi/l/tqdm.svg
+ :target: https://raw.githubusercontent.com/tqdm/tqdm/master/LICENCE
+.. |DOI| image:: https://img.shields.io/badge/DOI-10.5281/zenodo.595120-blue.svg
+ :target: https://doi.org/10.5281/zenodo.595120
+.. |binder-demo| image:: https://mybinder.org/badge_logo.svg
+ :target: https://mybinder.org/v2/gh/tqdm/tqdm/master?filepath=DEMO.ipynb
+.. |Screenshot-Jupyter1| image:: https://tqdm.github.io/img/jupyter-1.gif
+.. |Screenshot-Jupyter2| image:: https://tqdm.github.io/img/jupyter-2.gif
+.. |Screenshot-Jupyter3| image:: https://tqdm.github.io/img/jupyter-3.gif
+.. |README-Hits| image:: https://cgi.cdcl.ml/hits?q=tqdm&style=social&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif
+ :target: https://cgi.cdcl.ml/hits?q=tqdm&a=plot&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif&style=social
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/RECORD"
new file mode 100644
index 0000000..8f7d49f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/RECORD"
@@ -0,0 +1,75 @@
+../../Scripts/tqdm.exe,sha256=eSIItzLgr-SWbMde2FvDzZRioJlKLEOmJa3Y65sUy4k,108396
+tqdm-4.67.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+tqdm-4.67.1.dist-info/LICENCE,sha256=3DMlLoKQFeOxUAhvubOkD2rW-zLC9GEM6BL6Z301mGo,1985
+tqdm-4.67.1.dist-info/METADATA,sha256=aIoWMt9SWhmP7FLc_vsSRtMerO6cA1qsrC1-r42P9mk,57675
+tqdm-4.67.1.dist-info/RECORD,,
+tqdm-4.67.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tqdm-4.67.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+tqdm-4.67.1.dist-info/entry_points.txt,sha256=ReJCH7Ui3Zyh6M16E4OhsZ1oU7WtMXCfbtoyBhGO29Y,39
+tqdm-4.67.1.dist-info/top_level.txt,sha256=NLiUJNfmc9At15s7JURiwvqMEjUi9G5PMGRrmMYzNSM,5
+tqdm/__init__.py,sha256=9mQNYSSqP99JasubEC1POJLMmhkkBH6cJZxPIR5G2pQ,1572
+tqdm/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
+tqdm/__pycache__/__init__.cpython-312.pyc,,
+tqdm/__pycache__/__main__.cpython-312.pyc,,
+tqdm/__pycache__/_dist_ver.cpython-312.pyc,,
+tqdm/__pycache__/_main.cpython-312.pyc,,
+tqdm/__pycache__/_monitor.cpython-312.pyc,,
+tqdm/__pycache__/_tqdm.cpython-312.pyc,,
+tqdm/__pycache__/_tqdm_gui.cpython-312.pyc,,
+tqdm/__pycache__/_tqdm_notebook.cpython-312.pyc,,
+tqdm/__pycache__/_tqdm_pandas.cpython-312.pyc,,
+tqdm/__pycache__/_utils.cpython-312.pyc,,
+tqdm/__pycache__/asyncio.cpython-312.pyc,,
+tqdm/__pycache__/auto.cpython-312.pyc,,
+tqdm/__pycache__/autonotebook.cpython-312.pyc,,
+tqdm/__pycache__/cli.cpython-312.pyc,,
+tqdm/__pycache__/dask.cpython-312.pyc,,
+tqdm/__pycache__/gui.cpython-312.pyc,,
+tqdm/__pycache__/keras.cpython-312.pyc,,
+tqdm/__pycache__/notebook.cpython-312.pyc,,
+tqdm/__pycache__/rich.cpython-312.pyc,,
+tqdm/__pycache__/std.cpython-312.pyc,,
+tqdm/__pycache__/tk.cpython-312.pyc,,
+tqdm/__pycache__/utils.cpython-312.pyc,,
+tqdm/__pycache__/version.cpython-312.pyc,,
+tqdm/_dist_ver.py,sha256=m5AdYI-jB-v6P0VJ_70isH_p24EzSOGSwVvuAZmkmKY,23
+tqdm/_main.py,sha256=9ySvgmi_2Sw4CAo5UDW0Q2dxfTryboEWGHohfCJz0sA,283
+tqdm/_monitor.py,sha256=Uku-DPWgzJ7dO5CK08xKJK-E_F6qQ-JB3ksuXczSYR0,3699
+tqdm/_tqdm.py,sha256=LfLCuJ6bpsVo9xilmtBXyEm1vGnUCFrliW85j3J-nD4,283
+tqdm/_tqdm_gui.py,sha256=03Hc8KayxJveieI5-0-2NGiDpLvw9jZekofJUV7CCwk,287
+tqdm/_tqdm_notebook.py,sha256=BuHiLuxu6uEfZFaPJW3RPpPaxaVctEQA3kdSJSDL1hw,307
+tqdm/_tqdm_pandas.py,sha256=c9jptUgigN6axRDhRd4Rif98Tmxeopc1nFNFhIpbFUE,888
+tqdm/_utils.py,sha256=_4E73bfDj4f1s3sM42NLHNrZDOkijZoWq-n6xWLkdZ8,553
+tqdm/asyncio.py,sha256=Kp2rSkNRf9KRqa3d9YpgeZQ7L7EZf2Ki4bSc7UPIyoo,2757
+tqdm/auto.py,sha256=nDZflj6p2zKkjBCNBourrhS81zYfZy1_dQvbckrdW8o,871
+tqdm/autonotebook.py,sha256=Yb9F5uaiBPhfbDDFpbtoG8I2YUw3uQJ89rUDLbfR6ws,956
+tqdm/cli.py,sha256=SbKlN8QyZ2ogenqt-wT_p6_sx2OOdCjCyhoZBFnlmyI,11010
+tqdm/completion.sh,sha256=j79KbSmpIj_E11jfTfBXrGnUTzKXVpQ1vGVQvsyDRl4,946
+tqdm/contrib/__init__.py,sha256=OgSwVXm-vlDJ-2imtoQ9z8qdom4snMSRztH72KMA82A,2494
+tqdm/contrib/__pycache__/__init__.cpython-312.pyc,,
+tqdm/contrib/__pycache__/bells.cpython-312.pyc,,
+tqdm/contrib/__pycache__/concurrent.cpython-312.pyc,,
+tqdm/contrib/__pycache__/discord.cpython-312.pyc,,
+tqdm/contrib/__pycache__/itertools.cpython-312.pyc,,
+tqdm/contrib/__pycache__/logging.cpython-312.pyc,,
+tqdm/contrib/__pycache__/slack.cpython-312.pyc,,
+tqdm/contrib/__pycache__/telegram.cpython-312.pyc,,
+tqdm/contrib/__pycache__/utils_worker.cpython-312.pyc,,
+tqdm/contrib/bells.py,sha256=Yx1HqGCmHrESCAO700j5wE__JCleNODJxedh1ijPLD0,837
+tqdm/contrib/concurrent.py,sha256=K1yjloKS5WRNFyjLRth0DmU5PAnDbF0A-GD27N-J4a8,3986
+tqdm/contrib/discord.py,sha256=MtVIL1s_dxH21G4sL8FBgQ4Wei23ho9Ek5T-AommvNc,5243
+tqdm/contrib/itertools.py,sha256=WdKKQU5eSzsqHu29SN_oH12huYZo0Jihqoi9-nVhwz4,774
+tqdm/contrib/logging.py,sha256=NsYtnKttj2mMrGm58mEdo5a9DP_2vv8pZyrimSuWulA,3760
+tqdm/contrib/slack.py,sha256=eP_Mr5sQonYniHxxQNGue3jk2JkIPmPWFZqIYxnOui0,4007
+tqdm/contrib/telegram.py,sha256=vn_9SATMbbwn2PAbzSDyOX6av3eBB01QBug11P4H-Og,5008
+tqdm/contrib/utils_worker.py,sha256=HJP5Mz1S1xyzEke2JaqJ2sYLHXADYoo2epT5AzQ38eA,1207
+tqdm/dask.py,sha256=9Ei58eVqTossRLhAfWyUFCduXYKjmLmwkaXIy-CHYfs,1319
+tqdm/gui.py,sha256=STIB3K8iDzDgkNUqWIpvcI_u0OGtbGNy5NwpALXhfWs,5479
+tqdm/keras.py,sha256=op9sBkb6q6c6dw2wJ0SD2ZwpPK7yM1Vbg4l1Qiy3MIo,4373
+tqdm/notebook.py,sha256=GtZ3IapLL1v8WNDaTSvPw0bJGTyfp71Vfz5HDnAzx1M,10895
+tqdm/rich.py,sha256=YyMPkEHVyYUVUR3adJKbVX26iTmNKpNMf3DEqmm-m60,5021
+tqdm/std.py,sha256=tWjz6-QCa92aqYjz7PIdkLUCAfiy-lJZheBtZyIIyO0,57461
+tqdm/tk.py,sha256=Gu0uwXwLCGPRGHORdi3WvBLGiseUp_xxX_h_gp9VpK0,6701
+tqdm/tqdm.1,sha256=aILyUPk2S4OPe_uWy2P4AMjUf0oQ6PUW0nLYXB-BWwI,7889
+tqdm/utils.py,sha256=6E0BQw3Sg7uGWKBM_cDn3P42tXswRhzkggbhBgLDjl8,11821
+tqdm/version.py,sha256=-1yWjfu3P0eghVsysHH07fbzdiADNRdzRtYPqOaqR2A,333
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/REQUESTED" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/REQUESTED"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/REQUESTED"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/WHEEL"
new file mode 100644
index 0000000..ae527e7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (75.6.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/entry_points.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/entry_points.txt"
new file mode 100644
index 0000000..540e60f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/entry_points.txt"
@@ -0,0 +1,2 @@
+[console_scripts]
+tqdm = tqdm.cli:main
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/top_level.txt"
new file mode 100644
index 0000000..78620c4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm-4.67.1.dist-info/top_level.txt"
@@ -0,0 +1 @@
+tqdm
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/__init__.py"
new file mode 100644
index 0000000..8081f77
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/__init__.py"
@@ -0,0 +1,38 @@
+from ._monitor import TMonitor, TqdmSynchronisationWarning
+from ._tqdm_pandas import tqdm_pandas
+from .cli import main # TODO: remove in v5.0.0
+from .gui import tqdm as tqdm_gui # TODO: remove in v5.0.0
+from .gui import trange as tgrange # TODO: remove in v5.0.0
+from .std import (
+ TqdmDeprecationWarning, TqdmExperimentalWarning, TqdmKeyError, TqdmMonitorWarning,
+ TqdmTypeError, TqdmWarning, tqdm, trange)
+from .version import __version__
+
+__all__ = ['tqdm', 'tqdm_gui', 'trange', 'tgrange', 'tqdm_pandas',
+ 'tqdm_notebook', 'tnrange', 'main', 'TMonitor',
+ 'TqdmTypeError', 'TqdmKeyError',
+ 'TqdmWarning', 'TqdmDeprecationWarning',
+ 'TqdmExperimentalWarning',
+ 'TqdmMonitorWarning', 'TqdmSynchronisationWarning',
+ '__version__']
+
+
+def tqdm_notebook(*args, **kwargs): # pragma: no cover
+ """See tqdm.notebook.tqdm for full documentation"""
+ from warnings import warn
+
+ from .notebook import tqdm as _tqdm_notebook
+ warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`",
+ TqdmDeprecationWarning, stacklevel=2)
+ return _tqdm_notebook(*args, **kwargs)
+
+
+def tnrange(*args, **kwargs): # pragma: no cover
+ """Shortcut for `tqdm.notebook.tqdm(range(*args), **kwargs)`."""
+ from warnings import warn
+
+ from .notebook import trange as _tnrange
+ warn("Please use `tqdm.notebook.trange` instead of `tqdm.tnrange`",
+ TqdmDeprecationWarning, stacklevel=2)
+ return _tnrange(*args, **kwargs)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/__main__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/__main__.py"
new file mode 100644
index 0000000..4e28416
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/__main__.py"
@@ -0,0 +1,3 @@
+from .cli import main
+
+main()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_dist_ver.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_dist_ver.py"
new file mode 100644
index 0000000..61af7d5
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_dist_ver.py"
@@ -0,0 +1 @@
+__version__ = '4.67.1'
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_main.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_main.py"
new file mode 100644
index 0000000..04fdeef
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_main.py"
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .cli import * # NOQA
+from .cli import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.cli.*` instead of `tqdm._main.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_monitor.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_monitor.py"
new file mode 100644
index 0000000..f71aa56
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_monitor.py"
@@ -0,0 +1,95 @@
+import atexit
+from threading import Event, Thread, current_thread
+from time import time
+from warnings import warn
+
+__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
+
+
+class TqdmSynchronisationWarning(RuntimeWarning):
+ """tqdm multi-thread/-process errors which may cause incorrect nesting
+ but otherwise no adverse effects"""
+ pass
+
+
+class TMonitor(Thread):
+ """
+ Monitoring thread for tqdm bars.
+ Monitors if tqdm bars are taking too much time to display
+ and readjusts miniters automatically if necessary.
+
+ Parameters
+ ----------
+ tqdm_cls : class
+ tqdm class to use (can be core tqdm or a submodule).
+ sleep_interval : float
+ Time to sleep between monitoring checks.
+ """
+ _test = {} # internal vars for unit testing
+
+ def __init__(self, tqdm_cls, sleep_interval):
+ Thread.__init__(self)
+ self.daemon = True # kill thread when main killed (KeyboardInterrupt)
+ self.woken = 0 # last time woken up, to sync with monitor
+ self.tqdm_cls = tqdm_cls
+ self.sleep_interval = sleep_interval
+ self._time = self._test.get("time", time)
+ self.was_killed = self._test.get("Event", Event)()
+ atexit.register(self.exit)
+ self.start()
+
+ def exit(self):
+ self.was_killed.set()
+ if self is not current_thread():
+ self.join()
+ return self.report()
+
+ def get_instances(self):
+ # returns a copy of started `tqdm_cls` instances
+ return [i for i in self.tqdm_cls._instances.copy()
+ # Avoid race by checking that the instance started
+ if hasattr(i, 'start_t')]
+
+ def run(self):
+ cur_t = self._time()
+ while True:
+ # After processing and before sleeping, notify that we woke
+ # Need to be done just before sleeping
+ self.woken = cur_t
+ # Sleep some time...
+ self.was_killed.wait(self.sleep_interval)
+ # Quit if killed
+ if self.was_killed.is_set():
+ return
+ # Then monitor!
+ # Acquire lock (to access _instances)
+ with self.tqdm_cls.get_lock():
+ cur_t = self._time()
+ # Check tqdm instances are waiting too long to print
+ instances = self.get_instances()
+ for instance in instances:
+ # Check event in loop to reduce blocking time on exit
+ if self.was_killed.is_set():
+ return
+ # Only if mininterval > 1 (else iterations are just slow)
+ # and last refresh exceeded maxinterval
+ if (
+ instance.miniters > 1
+ and (cur_t - instance.last_print_t) >= instance.maxinterval
+ ):
+ # force bypassing miniters on next iteration
+ # (dynamic_miniters adjusts mininterval automatically)
+ instance.miniters = 1
+ # Refresh now! (works only for manual tqdm)
+ instance.refresh(nolock=True)
+ # Remove accidental long-lived strong reference
+ del instance
+ if instances != self.get_instances(): # pragma: nocover
+ warn("Set changed size during iteration" +
+ " (see https://github.com/tqdm/tqdm/issues/481)",
+ TqdmSynchronisationWarning, stacklevel=2)
+ # Remove accidental long-lived strong references
+ del instances
+
+ def report(self):
+ return not self.was_killed.is_set()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm.py"
new file mode 100644
index 0000000..7fc4962
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm.py"
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .std import * # NOQA
+from .std import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.std.*` instead of `tqdm._tqdm.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_gui.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_gui.py"
new file mode 100644
index 0000000..f32aa89
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_gui.py"
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .gui import * # NOQA
+from .gui import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.gui.*` instead of `tqdm._tqdm_gui.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_notebook.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_notebook.py"
new file mode 100644
index 0000000..f225fbf
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_notebook.py"
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .notebook import * # NOQA
+from .notebook import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.notebook.*` instead of `tqdm._tqdm_notebook.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_pandas.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_pandas.py"
new file mode 100644
index 0000000..c4fe6ef
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_tqdm_pandas.py"
@@ -0,0 +1,24 @@
+import sys
+
+__author__ = "github.com/casperdcl"
+__all__ = ['tqdm_pandas']
+
+
+def tqdm_pandas(tclass, **tqdm_kwargs):
+ """
+ Registers the given `tqdm` instance with
+ `pandas.core.groupby.DataFrameGroupBy.progress_apply`.
+ """
+ from tqdm import TqdmDeprecationWarning
+
+ if isinstance(tclass, type) or (getattr(tclass, '__name__', '').startswith(
+ 'tqdm_')): # delayed adapter case
+ TqdmDeprecationWarning(
+ "Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm, ...)`.",
+ fp_write=getattr(tqdm_kwargs.get('file', None), 'write', sys.stderr.write))
+ tclass.pandas(**tqdm_kwargs)
+ else:
+ TqdmDeprecationWarning(
+ "Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm(...))`.",
+ fp_write=getattr(tclass.fp, 'write', sys.stderr.write))
+ type(tclass).pandas(deprecated_t=tclass)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_utils.py"
new file mode 100644
index 0000000..385e849
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/_utils.py"
@@ -0,0 +1,11 @@
+from warnings import warn
+
+from .std import TqdmDeprecationWarning
+from .utils import ( # NOQA, pylint: disable=unused-import
+ CUR_OS, IS_NIX, IS_WIN, RE_ANSI, Comparable, FormatReplace, SimpleTextIOWrapper,
+ _environ_cols_wrapper, _is_ascii, _is_utf, _screen_shape_linux, _screen_shape_tput,
+ _screen_shape_windows, _screen_shape_wrapper, _supports_unicode, _term_move_up, colorama)
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.utils.*` instead of `tqdm._utils.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/asyncio.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/asyncio.py"
new file mode 100644
index 0000000..2d00a0a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/asyncio.py"
@@ -0,0 +1,93 @@
+"""
+Asynchronous progressbar decorator for iterators.
+Includes a default `range` iterator printing to `stderr`.
+
+Usage:
+>>> from tqdm.asyncio import trange, tqdm
+>>> async for i in trange(10):
+... ...
+"""
+import asyncio
+from sys import version_info
+
+from .std import tqdm as std_tqdm
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['tqdm_asyncio', 'tarange', 'tqdm', 'trange']
+
+
+class tqdm_asyncio(std_tqdm):
+ """
+ Asynchronous-friendly version of tqdm.
+ """
+ def __init__(self, iterable=None, *args, **kwargs):
+ super().__init__(iterable, *args, **kwargs)
+ self.iterable_awaitable = False
+ if iterable is not None:
+ if hasattr(iterable, "__anext__"):
+ self.iterable_next = iterable.__anext__
+ self.iterable_awaitable = True
+ elif hasattr(iterable, "__next__"):
+ self.iterable_next = iterable.__next__
+ else:
+ self.iterable_iterator = iter(iterable)
+ self.iterable_next = self.iterable_iterator.__next__
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ try:
+ if self.iterable_awaitable:
+ res = await self.iterable_next()
+ else:
+ res = self.iterable_next()
+ self.update()
+ return res
+ except StopIteration:
+ self.close()
+ raise StopAsyncIteration
+ except BaseException:
+ self.close()
+ raise
+
+ def send(self, *args, **kwargs):
+ return self.iterable.send(*args, **kwargs)
+
+ @classmethod
+ def as_completed(cls, fs, *, loop=None, timeout=None, total=None, **tqdm_kwargs):
+ """
+ Wrapper for `asyncio.as_completed`.
+ """
+ if total is None:
+ total = len(fs)
+ kwargs = {}
+ if version_info[:2] < (3, 10):
+ kwargs['loop'] = loop
+ yield from cls(asyncio.as_completed(fs, timeout=timeout, **kwargs),
+ total=total, **tqdm_kwargs)
+
+ @classmethod
+ async def gather(cls, *fs, loop=None, timeout=None, total=None, **tqdm_kwargs):
+ """
+ Wrapper for `asyncio.gather`.
+ """
+ async def wrap_awaitable(i, f):
+ return i, await f
+
+ ifs = [wrap_awaitable(i, f) for i, f in enumerate(fs)]
+ res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout,
+ total=total, **tqdm_kwargs)]
+ return [i for _, i in sorted(res)]
+
+
+def tarange(*args, **kwargs):
+ """
+ A shortcut for `tqdm.asyncio.tqdm(range(*args), **kwargs)`.
+ """
+ return tqdm_asyncio(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_asyncio
+trange = tarange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/auto.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/auto.py"
new file mode 100644
index 0000000..206c440
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/auto.py"
@@ -0,0 +1,40 @@
+"""
+Enables multiple commonly used features.
+
+Method resolution order:
+
+- `tqdm.autonotebook` without import warnings
+- `tqdm.asyncio`
+- `tqdm.std` base class
+
+Usage:
+>>> from tqdm.auto import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+import warnings
+
+from .std import TqdmExperimentalWarning
+
+with warnings.catch_warnings():
+ warnings.simplefilter("ignore", category=TqdmExperimentalWarning)
+ from .autonotebook import tqdm as notebook_tqdm
+
+from .asyncio import tqdm as asyncio_tqdm
+from .std import tqdm as std_tqdm
+
+if notebook_tqdm != std_tqdm:
+ class tqdm(notebook_tqdm, asyncio_tqdm): # pylint: disable=inconsistent-mro
+ pass
+else:
+ tqdm = asyncio_tqdm
+
+
+def trange(*args, **kwargs):
+ """
+ A shortcut for `tqdm.auto.tqdm(range(*args), **kwargs)`.
+ """
+ return tqdm(range(*args), **kwargs)
+
+
+__all__ = ["tqdm", "trange"]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/autonotebook.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/autonotebook.py"
new file mode 100644
index 0000000..a09f2ec
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/autonotebook.py"
@@ -0,0 +1,29 @@
+"""
+Automatically choose between `tqdm.notebook` and `tqdm.std`.
+
+Usage:
+>>> from tqdm.autonotebook import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+import sys
+from warnings import warn
+
+try:
+ get_ipython = sys.modules['IPython'].get_ipython
+ if 'IPKernelApp' not in get_ipython().config: # pragma: no cover
+ raise ImportError("console")
+ from .notebook import WARN_NOIPYW, IProgress
+ if IProgress is None:
+ from .std import TqdmWarning
+ warn(WARN_NOIPYW, TqdmWarning, stacklevel=2)
+ raise ImportError('ipywidgets')
+except Exception:
+ from .std import tqdm, trange
+else: # pragma: no cover
+ from .notebook import tqdm, trange
+ from .std import TqdmExperimentalWarning
+ warn("Using `tqdm.autonotebook.tqdm` in notebook mode."
+ " Use `tqdm.tqdm` instead to force console mode"
+ " (e.g. in jupyter console)", TqdmExperimentalWarning, stacklevel=2)
+__all__ = ["tqdm", "trange"]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/cli.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/cli.py"
new file mode 100644
index 0000000..e54a7fc
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/cli.py"
@@ -0,0 +1,324 @@
+"""
+Module version for monitoring CLI pipes (`... | python -m tqdm | ...`).
+"""
+import logging
+import re
+import sys
+from ast import literal_eval as numeric
+from textwrap import indent
+
+from .std import TqdmKeyError, TqdmTypeError, tqdm
+from .version import __version__
+
+__all__ = ["main"]
+log = logging.getLogger(__name__)
+
+
+def cast(val, typ):
+    """Cast the CLI string `val` to `typ`, raising `TqdmTypeError` on failure.
+
+    `typ` may be a disjunction such as "int or float", in which case each
+    alternative is tried in turn until one succeeds.
+    """
+    log.debug((val, typ))
+    if " or " in typ:
+        for t in typ.split(" or "):
+            try:
+                return cast(val, t)
+            except TqdmTypeError:
+                pass
+        raise TqdmTypeError(f"{val} : {typ}")
+
+    # sys.stderr.write('\ndebug | `val:type`: `' + val + ':' + typ + '`.\n')
+    if typ == 'bool':
+        # a bare flag (empty value) counts as True
+        if (val == 'True') or (val == ''):
+            return True
+        if val == 'False':
+            return False
+        raise TqdmTypeError(val + ' : ' + typ)
+    if typ == 'chr':
+        if len(val) == 1:
+            return val.encode()
+        if re.match(r"^\\\w+$", val):
+            # e.g. '\\n' -> b'\n'; `eval` is constrained by the regex above
+            # to a single backslash escape, so arbitrary code cannot run
+            return eval(f'"{val}"').encode()
+        raise TqdmTypeError(f"{val} : {typ}")
+    if typ == 'str':
+        return val
+    if typ == 'int':
+        try:
+            return int(val)
+        except ValueError as exc:
+            raise TqdmTypeError(f"{val} : {typ}") from exc
+    if typ == 'float':
+        try:
+            return float(val)
+        except ValueError as exc:
+            raise TqdmTypeError(f"{val} : {typ}") from exc
+    raise TqdmTypeError(f"{val} : {typ}")
+
+
+def posix_pipe(fin, fout, delim=b'\\n', buf_size=256,
+               callback=lambda float: None, callback_len=True):
+    """
+    Pipe `fin` to `fout`, invoking `callback` as progress is made.
+
+    Params
+    ------
+    fin  : binary file with `read(buf_size : int)` method
+    fout  : binary file with `write` (and optionally `flush`) methods.
+    callback  : function(float), e.g.: `tqdm.update`
+    callback_len  : If (default: True) do `callback(len(buffer))`.
+      Otherwise, do `callback(data) for data in buffer.split(delim)`.
+    """
+    fp_write = fout.write
+
+    if not delim:
+        # no delimiter: stream raw chunks and report byte counts
+        while True:
+            tmp = fin.read(buf_size)
+
+            # flush at EOF
+            if not tmp:
+                getattr(fout, 'flush', lambda: None)()
+                return
+
+            fp_write(tmp)
+            callback(len(tmp))
+        # return
+
+    buf = b''
+    len_delim = len(delim)
+    # n = 0
+    while True:
+        tmp = fin.read(buf_size)
+
+        # flush at EOF
+        if not tmp:
+            if buf:
+                # emit the final (delimiter-less) remainder
+                fp_write(buf)
+                if callback_len:
+                    # n += 1 + buf.count(delim)
+                    callback(1 + buf.count(delim))
+                else:
+                    for i in buf.split(delim):
+                        callback(i)
+            getattr(fout, 'flush', lambda: None)()
+            return  # n
+
+        # forward each complete delimited record in this chunk
+        while True:
+            i = tmp.find(delim)
+            if i < 0:
+                buf += tmp
+                break
+            fp_write(buf + tmp[:i + len(delim)])
+            # n += 1
+            callback(1 if callback_len else (buf + tmp[:i]))
+            buf = b''
+            tmp = tmp[i + len_delim:]
+
+
+# ((opt, type), ... ) - parses "    name  : type" lines out of tqdm's docstring
+RE_OPTS = re.compile(r'\n {4}(\S+)\s{2,}:\s*([^,]+)')
+# better split method assuming no positional args
+RE_SHLEX = re.compile(r'\s*(?<!\S)--?([^\s=]+)(\s+|=|$)')
+
+# TODO: add custom support for some of the following?
+UNSUPPORTED_OPTS = ('iterable', 'gui', 'out', 'file')
+
+# The 8 leading spaces are required for consistency
+CLI_EXTRA_DOC = r"""
+        Extra CLI Options
+        -----------------
+        name  : type, optional
+            TODO: find out why this is needed.
+        delim  : chr, optional
+            Delimiting character [default: '\n']. Use '\0' for null.
+            N.B.: on Windows systems, Python converts '\n' to '\r\n'.
+        buf_size  : int, optional
+            String buffer size in bytes [default: 256]
+            used when `delim` is specified.
+        bytes  : bool, optional
+            If true, will count bytes, ignore `delim`, and default
+            `unit_scale` to True, `unit_divisor` to 1024, and `unit` to 'B'.
+        tee  : bool, optional
+            If true, passes `stdin` to both `stderr` and `stdout`.
+        update  : bool, optional
+            If true, will treat input as newly elapsed iterations,
+            i.e. numbers to pass to `update()`. Note that this is slow
+            (~2e5 it/s) since every input must be decoded as a number.
+        update_to  : bool, optional
+            If true, will treat input as total elapsed iterations,
+            i.e. numbers to assign to `self.n`. Note that this is slow
+            (~2e5 it/s) since every input must be decoded as a number.
+        null  : bool, optional
+            If true, will discard input (no stdout).
+        manpath  : str, optional
+            Directory in which to install tqdm man pages.
+        comppath  : str, optional
+            Directory in which to place tqdm completion.
+        log  : str, optional
+            CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET.
+"""
+
+
+def main(fp=sys.stderr, argv=None):
+ """
+ Parameters (internal use only)
+ ---------
+ fp : file-like object for tqdm
+ argv : list (default: sys.argv[1:])
+ """
+ if argv is None:
+ argv = sys.argv[1:]
+ try:
+ log_idx = argv.index('--log')
+ except ValueError:
+ for i in argv:
+ if i.startswith('--log='):
+ logLevel = i[len('--log='):]
+ break
+ else:
+ logLevel = 'INFO'
+ else:
+ # argv.pop(log_idx)
+ # logLevel = argv.pop(log_idx)
+ logLevel = argv[log_idx + 1]
+ logging.basicConfig(level=getattr(logging, logLevel),
+ format="%(levelname)s:%(module)s:%(lineno)d:%(message)s")
+
+ # py<3.13 doesn't dedent docstrings
+ d = (tqdm.__doc__ if sys.version_info < (3, 13)
+ else indent(tqdm.__doc__, " ")) + CLI_EXTRA_DOC
+
+ opt_types = dict(RE_OPTS.findall(d))
+ # opt_types['delim'] = 'chr'
+
+ for o in UNSUPPORTED_OPTS:
+ opt_types.pop(o)
+
+ log.debug(sorted(opt_types.items()))
+
+ # d = RE_OPTS.sub(r' --\1=<\1> : \2', d)
+ split = RE_OPTS.split(d)
+ opt_types_desc = zip(split[1::3], split[2::3], split[3::3])
+ d = ''.join(('\n --{0} : {2}{3}' if otd[1] == 'bool' else
+ '\n --{0}=<{1}> : {2}{3}').format(
+ otd[0].replace('_', '-'), otd[0], *otd[1:])
+ for otd in opt_types_desc if otd[0] not in UNSUPPORTED_OPTS)
+
+ help_short = "Usage:\n tqdm [--help | options]\n"
+ d = help_short + """
+Options:
+ -h, --help Print this help and exit.
+ -v, --version Print version and exit.
+""" + d.strip('\n') + '\n'
+
+ # opts = docopt(d, version=__version__)
+ if any(v in argv for v in ('-v', '--version')):
+ sys.stdout.write(__version__ + '\n')
+ sys.exit(0)
+ elif any(v in argv for v in ('-h', '--help')):
+ sys.stdout.write(d + '\n')
+ sys.exit(0)
+ elif argv and argv[0][:2] != '--':
+ sys.stderr.write(f"Error:Unknown argument:{argv[0]}\n{help_short}")
+
+ argv = RE_SHLEX.split(' '.join(["tqdm"] + argv))
+ opts = dict(zip(argv[1::3], argv[3::3]))
+
+ log.debug(opts)
+ opts.pop('log', True)
+
+ tqdm_args = {'file': fp}
+ try:
+ for (o, v) in opts.items():
+ o = o.replace('-', '_')
+ try:
+ tqdm_args[o] = cast(v, opt_types[o])
+ except KeyError as e:
+ raise TqdmKeyError(str(e))
+ log.debug('args:' + str(tqdm_args))
+
+ delim_per_char = tqdm_args.pop('bytes', False)
+ update = tqdm_args.pop('update', False)
+ update_to = tqdm_args.pop('update_to', False)
+ if sum((delim_per_char, update, update_to)) > 1:
+ raise TqdmKeyError("Can only have one of --bytes --update --update_to")
+ except Exception:
+ fp.write("\nError:\n" + help_short)
+ stdin, stdout_write = sys.stdin, sys.stdout.write
+ for i in stdin:
+ stdout_write(i)
+ raise
+ else:
+ buf_size = tqdm_args.pop('buf_size', 256)
+ delim = tqdm_args.pop('delim', b'\\n')
+ tee = tqdm_args.pop('tee', False)
+ manpath = tqdm_args.pop('manpath', None)
+ comppath = tqdm_args.pop('comppath', None)
+ if tqdm_args.pop('null', False):
+ class stdout(object):
+ @staticmethod
+ def write(_):
+ pass
+ else:
+ stdout = sys.stdout
+ stdout = getattr(stdout, 'buffer', stdout)
+ stdin = getattr(sys.stdin, 'buffer', sys.stdin)
+ if manpath or comppath:
+ try: # py<3.9
+ import importlib_resources as resources
+ except ImportError:
+ from importlib import resources
+ from pathlib import Path
+
+ def cp(name, dst):
+ """copy resource `name` to `dst`"""
+ fi = resources.files('tqdm') / name
+ dst.write_bytes(fi.read_bytes())
+ log.info("written:%s", dst)
+ if manpath is not None:
+ cp('tqdm.1', Path(manpath) / 'tqdm.1')
+ if comppath is not None:
+ cp('completion.sh', Path(comppath) / 'tqdm_completion.sh')
+ sys.exit(0)
+ if tee:
+ stdout_write = stdout.write
+ fp_write = getattr(fp, 'buffer', fp).write
+
+ class stdout(object): # pylint: disable=function-redefined
+ @staticmethod
+ def write(x):
+ with tqdm.external_write_mode(file=fp):
+ fp_write(x)
+ stdout_write(x)
+ if delim_per_char:
+ tqdm_args.setdefault('unit', 'B')
+ tqdm_args.setdefault('unit_scale', True)
+ tqdm_args.setdefault('unit_divisor', 1024)
+ log.debug(tqdm_args)
+ with tqdm(**tqdm_args) as t:
+ posix_pipe(stdin, stdout, '', buf_size, t.update)
+ elif delim == b'\\n':
+ log.debug(tqdm_args)
+ write = stdout.write
+ if update or update_to:
+ with tqdm(**tqdm_args) as t:
+ if update:
+ def callback(i):
+ t.update(numeric(i.decode()))
+ else: # update_to
+ def callback(i):
+ t.update(numeric(i.decode()) - t.n)
+ for i in stdin:
+ write(i)
+ callback(i)
+ else:
+ for i in tqdm(stdin, **tqdm_args):
+ write(i)
+ else:
+ log.debug(tqdm_args)
+ with tqdm(**tqdm_args) as t:
+ callback_len = False
+ if update:
+ def callback(i):
+ t.update(numeric(i.decode()))
+ elif update_to:
+ def callback(i):
+ t.update(numeric(i.decode()) - t.n)
+ else:
+ callback = t.update
+ callback_len = True
+ posix_pipe(stdin, stdout, delim, buf_size, callback, callback_len)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/completion.sh" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/completion.sh"
new file mode 100644
index 0000000..9f61c7f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/completion.sh"
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+# Bash programmable completion for the `tqdm` CLI
+# (installed via `tqdm --comppath <dir>`).
+_tqdm(){
+  local cur prv
+  cur="${COMP_WORDS[COMP_CWORD]}"
+  prv="${COMP_WORDS[COMP_CWORD - 1]}"
+
+  case ${prv} in
+  --bar_format|--buf_size|--colour|--comppath|--delay|--delim|--desc|--initial|--lock_args|--manpath|--maxinterval|--mininterval|--miniters|--ncols|--nrows|--position|--postfix|--smoothing|--total|--unit|--unit_divisor)
+    # await user input
+    ;;
+  "--log")
+    COMPREPLY=($(compgen -W 'CRITICAL FATAL ERROR WARN WARNING INFO DEBUG NOTSET' -- ${cur}))
+    ;;
+  *)
+    COMPREPLY=($(compgen -W '--ascii --bar_format --buf_size --bytes --colour --comppath --delay --delim --desc --disable --dynamic_ncols --help --initial --leave --lock_args --log --manpath --maxinterval --mininterval --miniters --ncols --nrows --null --position --postfix --smoothing --tee --total --unit --unit_divisor --unit_scale --update --update_to --version --write_bytes -h -v' -- ${cur}))
+    ;;
+  esac
+}
+complete -F _tqdm tqdm
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/__init__.py"
new file mode 100644
index 0000000..d059461
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/__init__.py"
@@ -0,0 +1,92 @@
+"""
+Thin wrappers around common functions.
+
+Subpackages contain potentially unstable extensions.
+"""
+from warnings import warn
+
+from ..auto import tqdm as tqdm_auto
+from ..std import TqdmDeprecationWarning, tqdm
+from ..utils import ObjectWrapper
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['tenumerate', 'tzip', 'tmap']
+
+
+class DummyTqdmFile(ObjectWrapper):
+    """Dummy file-like that will write to tqdm"""
+
+    def __init__(self, wrapped):
+        super().__init__(wrapped)
+        # pending fragments not yet terminated by a newline
+        self._buf = []
+
+    def write(self, x, nolock=False):
+        # buffer until a newline arrives, then route the completed line(s)
+        # through `tqdm.write` so active progress bars are not garbled
+        nl = b"\n" if isinstance(x, bytes) else "\n"
+        pre, sep, post = x.rpartition(nl)
+        if sep:
+            blank = type(nl)()
+            tqdm.write(blank.join(self._buf + [pre, sep]),
+                       end=blank, file=self._wrapped, nolock=nolock)
+            self._buf = [post]
+        else:
+            self._buf.append(x)
+
+    def __del__(self):
+        # best-effort flush of any unterminated trailing output
+        if self._buf:
+            blank = type(self._buf[0])()
+            try:
+                tqdm.write(blank.join(self._buf), end=blank, file=self._wrapped)
+            except (OSError, ValueError):
+                pass
+
+
+def builtin_iterable(func):
+    """Returns `func` unchanged (deprecated no-op kept for compatibility)."""
+    warn("This function has no effect, and will be removed in tqdm==5.0.0",
+         TqdmDeprecationWarning, stacklevel=2)
+    return func
+
+
+def tenumerate(iterable, start=0, total=None, tqdm_class=tqdm_auto, **tqdm_kwargs):
+    """
+    Equivalent of `numpy.ndenumerate` or builtin `enumerate`.
+
+    Parameters
+    ----------
+    tqdm_class  : [default: tqdm.auto.tqdm].
+    """
+    # numpy is optional: only used to special-case ndarray inputs
+    try:
+        import numpy as np
+    except ImportError:
+        pass
+    else:
+        if isinstance(iterable, np.ndarray):
+            return tqdm_class(np.ndenumerate(iterable), total=total or iterable.size,
+                              **tqdm_kwargs)
+    return enumerate(tqdm_class(iterable, total=total, **tqdm_kwargs), start)
+
+
+def tzip(iter1, *iter2plus, **tqdm_kwargs):
+    """
+    Equivalent of builtin `zip`.
+
+    Parameters
+    ----------
+    tqdm_class  : [default: tqdm.auto.tqdm].
+    """
+    kwargs = tqdm_kwargs.copy()
+    tqdm_class = kwargs.pop("tqdm_class", tqdm_auto)
+    # only the first iterable is wrapped; it drives the progress count
+    for i in zip(tqdm_class(iter1, **kwargs), *iter2plus):
+        yield i
+
+
+def tmap(function, *sequences, **tqdm_kwargs):
+    """
+    Equivalent of builtin `map`.
+
+    Parameters
+    ----------
+    tqdm_class  : [default: tqdm.auto.tqdm].
+    """
+    # delegate progress handling to `tzip`
+    for i in tzip(*sequences, **tqdm_kwargs):
+        yield function(*i)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/bells.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/bells.py"
new file mode 100644
index 0000000..5b8f4b9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/bells.py"
@@ -0,0 +1,26 @@
+"""
+Even more features than `tqdm.auto` (all the bells & whistles):
+
+- `tqdm.auto`
+- `tqdm.tqdm.pandas`
+- `tqdm.contrib.telegram`
+ + uses `${TQDM_TELEGRAM_TOKEN}` and `${TQDM_TELEGRAM_CHAT_ID}`
+- `tqdm.contrib.discord`
+ + uses `${TQDM_DISCORD_TOKEN}` and `${TQDM_DISCORD_CHANNEL_ID}`
+"""
+__all__ = ['tqdm', 'trange']
+import warnings
+from os import getenv
+
+# Pick the most specific backend for which credentials are configured in the
+# environment, falling back to `tqdm.auto`.
+if getenv("TQDM_SLACK_TOKEN") and getenv("TQDM_SLACK_CHANNEL"):
+    from .slack import tqdm, trange
+elif getenv("TQDM_TELEGRAM_TOKEN") and getenv("TQDM_TELEGRAM_CHAT_ID"):
+    from .telegram import tqdm, trange
+elif getenv("TQDM_DISCORD_TOKEN") and getenv("TQDM_DISCORD_CHANNEL_ID"):
+    from .discord import tqdm, trange
+else:
+    from ..auto import tqdm, trange
+
+# Register pandas integration (`DataFrame.progress_apply` etc.); pandas may
+# emit a FutureWarning during registration which is deliberately silenced.
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore", category=FutureWarning)
+    tqdm.pandas()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/concurrent.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/concurrent.py"
new file mode 100644
index 0000000..cd81d62
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/concurrent.py"
@@ -0,0 +1,105 @@
+"""
+Thin wrappers around `concurrent.futures`.
+"""
+from contextlib import contextmanager
+from operator import length_hint
+from os import cpu_count
+
+from ..auto import tqdm as tqdm_auto
+from ..std import TqdmWarning
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['thread_map', 'process_map']
+
+
+@contextmanager
+def ensure_lock(tqdm_class, lock_name=""):
+    """get (create if necessary) and then restore `tqdm_class`'s lock"""
+    old_lock = getattr(tqdm_class, '_lock', None)  # don't create a new lock
+    lock = old_lock or tqdm_class.get_lock()  # maybe create a new lock
+    lock = getattr(lock, lock_name, lock)  # maybe subtype
+    tqdm_class.set_lock(lock)
+    yield lock
+    # restore the pre-existing state on exit
+    if old_lock is None:
+        del tqdm_class._lock
+    else:
+        tqdm_class.set_lock(old_lock)
+
+
+def _executor_map(PoolExecutor, fn, *iterables, **tqdm_kwargs):
+    """
+    Implementation of `thread_map` and `process_map`.
+
+    Parameters
+    ----------
+    tqdm_class  : [default: tqdm.auto.tqdm].
+    max_workers  : [default: min(32, cpu_count() + 4)].
+    chunksize  : [default: 1].
+    lock_name  : [default: "":str].
+    """
+    kwargs = tqdm_kwargs.copy()
+    if "total" not in kwargs:
+        # `length_hint` works for sized and length-hinted iterables alike
+        kwargs["total"] = length_hint(iterables[0])
+    tqdm_class = kwargs.pop("tqdm_class", tqdm_auto)
+    max_workers = kwargs.pop("max_workers", min(32, cpu_count() + 4))
+    chunksize = kwargs.pop("chunksize", 1)
+    lock_name = kwargs.pop("lock_name", "")
+    with ensure_lock(tqdm_class, lock_name=lock_name) as lk:
+        # share lock in case workers are already using `tqdm`
+        with PoolExecutor(max_workers=max_workers, initializer=tqdm_class.set_lock,
+                          initargs=(lk,)) as ex:
+            return list(tqdm_class(ex.map(fn, *iterables, chunksize=chunksize), **kwargs))
+
+
+def thread_map(fn, *iterables, **tqdm_kwargs):
+    """
+    Equivalent of `list(map(fn, *iterables))`
+    driven by `concurrent.futures.ThreadPoolExecutor`.
+
+    Parameters
+    ----------
+    tqdm_class  : optional
+        `tqdm` class to use for bars [default: tqdm.auto.tqdm].
+    max_workers  : int, optional
+        Maximum number of workers to spawn; passed to
+        `concurrent.futures.ThreadPoolExecutor.__init__`.
+        [default: min(32, cpu_count() + 4)].
+    """
+    from concurrent.futures import ThreadPoolExecutor
+    return _executor_map(ThreadPoolExecutor, fn, *iterables, **tqdm_kwargs)
+
+
+def process_map(fn, *iterables, **tqdm_kwargs):
+    """
+    Equivalent of `list(map(fn, *iterables))`
+    driven by `concurrent.futures.ProcessPoolExecutor`.
+
+    Parameters
+    ----------
+    tqdm_class  : optional
+        `tqdm` class to use for bars [default: tqdm.auto.tqdm].
+    max_workers  : int, optional
+        Maximum number of workers to spawn; passed to
+        `concurrent.futures.ProcessPoolExecutor.__init__`.
+        [default: min(32, cpu_count() + 4)].
+    chunksize  : int, optional
+        Size of chunks sent to worker processes; passed to
+        `concurrent.futures.ProcessPoolExecutor.map`. [default: 1].
+    lock_name  : str, optional
+        Member of `tqdm_class.get_lock()` to use [default: mp_lock].
+    """
+    from concurrent.futures import ProcessPoolExecutor
+    if iterables and "chunksize" not in tqdm_kwargs:
+        # default `chunksize=1` has poor performance for large iterables
+        # (most time spent dispatching items to workers).
+        longest_iterable_len = max(map(length_hint, iterables))
+        if longest_iterable_len > 1000:
+            from warnings import warn
+            warn("Iterable length %d > 1000 but `chunksize` is not set."
+                 " This may seriously degrade multiprocess performance."
+                 " Set `chunksize=1` or more." % longest_iterable_len,
+                 TqdmWarning, stacklevel=2)
+    if "lock_name" not in tqdm_kwargs:
+        tqdm_kwargs = tqdm_kwargs.copy()
+        tqdm_kwargs["lock_name"] = "mp_lock"
+    return _executor_map(ProcessPoolExecutor, fn, *iterables, **tqdm_kwargs)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/discord.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/discord.py"
new file mode 100644
index 0000000..574baa8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/discord.py"
@@ -0,0 +1,156 @@
+"""
+Sends updates to a Discord bot.
+
+Usage:
+>>> from tqdm.contrib.discord import tqdm, trange
+>>> for i in trange(10, token='{token}', channel_id='{channel_id}'):
+... ...
+
+
+"""
+from os import getenv
+from warnings import warn
+
+from requests import Session
+from requests.utils import default_user_agent
+
+from ..auto import tqdm as tqdm_auto
+from ..std import TqdmWarning
+from ..version import __version__
+from .utils_worker import MonoWorker
+
+__author__ = {"github.com/": ["casperdcl", "guigoruiz1"]}
+__all__ = ['DiscordIO', 'tqdm_discord', 'tdrange', 'tqdm', 'trange']
+
+
+class DiscordIO(MonoWorker):
+    """Non-blocking file-like IO using a Discord Bot."""
+    API = "https://discord.com/api/v10"
+    UA = f"tqdm (https://tqdm.github.io, {__version__}) {default_user_agent()}"
+
+    def __init__(self, token, channel_id):
+        """Creates a new message in the given `channel_id`."""
+        super().__init__()
+        self.token = token
+        self.channel_id = channel_id
+        self.session = Session()
+        self.text = self.__class__.__name__
+        # eagerly create the Discord message (property has that side effect)
+        self.message_id
+
+    @property
+    def message_id(self):
+        # lazily POSTs the initial message; caches its id in `_message_id`.
+        # Returns None when creation failed (callers check for this).
+        if hasattr(self, '_message_id'):
+            return self._message_id
+        try:
+            res = self.session.post(
+                f'{self.API}/channels/{self.channel_id}/messages',
+                headers={'Authorization': f'Bot {self.token}', 'User-Agent': self.UA},
+                json={'content': f"`{self.text}`"}).json()
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            if res.get('error_code') == 429:
+                warn("Creation rate limit: try increasing `mininterval`.",
+                     TqdmWarning, stacklevel=2)
+            else:
+                self._message_id = res['id']
+                return self._message_id
+
+    def write(self, s):
+        """Replaces internal `message_id`'s text with `s`."""
+        if not s:
+            s = "..."
+        s = s.replace('\r', '').strip()
+        if s == self.text:
+            return  # avoid duplicate message Bot error
+        message_id = self.message_id
+        if message_id is None:
+            return
+        self.text = s
+        try:
+            future = self.submit(
+                self.session.patch,
+                f'{self.API}/channels/{self.channel_id}/messages/{message_id}',
+                headers={'Authorization': f'Bot {self.token}', 'User-Agent': self.UA},
+                json={'content': f"`{self.text}`"})
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            return future
+
+    def delete(self):
+        """Deletes internal `message_id`."""
+        try:
+            future = self.submit(
+                self.session.delete,
+                f'{self.API}/channels/{self.channel_id}/messages/{self.message_id}',
+                headers={'Authorization': f'Bot {self.token}', 'User-Agent': self.UA})
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            return future
+
+
+class tqdm_discord(tqdm_auto):
+    """
+    Standard `tqdm.auto.tqdm` but also sends updates to a Discord Bot.
+    May take a few seconds to create (`__init__`).
+
+    - create a discord bot (not public, no requirement of OAuth2 code
+      grant, only send message permissions) & invite it to a channel:
+      <https://discordpy.readthedocs.io/en/latest/discord.html>
+    - copy the bot `{token}` & `{channel_id}` and paste below
+
+    >>> from tqdm.contrib.discord import tqdm, trange
+    >>> for i in tqdm(iterable, token='{token}', channel_id='{channel_id}'):
+    ...     ...
+    """
+    def __init__(self, *args, **kwargs):
+        """
+        Parameters
+        ----------
+        token  : str, required. Discord bot token
+            [default: ${TQDM_DISCORD_TOKEN}].
+        channel_id  : int, required. Discord channel ID
+            [default: ${TQDM_DISCORD_CHANNEL_ID}].
+
+        See `tqdm.auto.tqdm.__init__` for other parameters.
+        """
+        if not kwargs.get('disable'):
+            kwargs = kwargs.copy()
+            self.dio = DiscordIO(
+                kwargs.pop('token', getenv('TQDM_DISCORD_TOKEN')),
+                kwargs.pop('channel_id', getenv('TQDM_DISCORD_CHANNEL_ID')))
+        super().__init__(*args, **kwargs)
+
+    def display(self, **kwargs):
+        super().display(**kwargs)
+        fmt = self.format_dict
+        # force the unicode-block bar style Discord renders well
+        if fmt.get('bar_format', None):
+            fmt['bar_format'] = fmt['bar_format'].replace(
+                '<bar/>', '{bar:10u}').replace('{bar}', '{bar:10u}')
+        else:
+            fmt['bar_format'] = '{l_bar}{bar:10u}{r_bar}'
+        self.dio.write(self.format_meter(**fmt))
+
+    def clear(self, *args, **kwargs):
+        super().clear(*args, **kwargs)
+        if not self.disable:
+            self.dio.write("")
+
+    def close(self):
+        if self.disable:
+            return
+        super().close()
+        # remove the Discord message unless the bar is meant to be left behind
+        if not (self.leave or (self.leave is None and self.pos == 0)):
+            self.dio.delete()
+
+
+def tdrange(*args, **kwargs):
+    """Shortcut for `tqdm.contrib.discord.tqdm(range(*args), **kwargs)`."""
+    return tqdm_discord(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_discord
+trange = tdrange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/itertools.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/itertools.py"
new file mode 100644
index 0000000..e67651a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/itertools.py"
@@ -0,0 +1,35 @@
+"""
+Thin wrappers around `itertools`.
+"""
+import itertools
+
+from ..auto import tqdm as tqdm_auto
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['product']
+
+
+def product(*iterables, **tqdm_kwargs):
+    """
+    Equivalent of `itertools.product`.
+
+    Parameters
+    ----------
+    tqdm_class  : [default: tqdm.auto.tqdm].
+    """
+    kwargs = tqdm_kwargs.copy()
+    tqdm_class = kwargs.pop("tqdm_class", tqdm_auto)
+    # total is the product of the lengths, when all inputs are sized
+    try:
+        lens = list(map(len, iterables))
+    except TypeError:
+        total = None
+    else:
+        total = 1
+        for i in lens:
+            total *= i
+    kwargs.setdefault("total", total)
+    with tqdm_class(**kwargs) as t:
+        it = itertools.product(*iterables)
+        for i in it:
+            yield i
+            t.update()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/logging.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/logging.py"
new file mode 100644
index 0000000..e06febe
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/logging.py"
@@ -0,0 +1,126 @@
+"""
+Helper functionality for interoperability with stdlib `logging`.
+"""
+import logging
+import sys
+from contextlib import contextmanager
+
+try:
+ from typing import Iterator, List, Optional, Type # noqa: F401
+except ImportError:
+ pass
+
+from ..std import tqdm as std_tqdm
+
+
+class _TqdmLoggingHandler(logging.StreamHandler):
+    """StreamHandler that emits records via `tqdm_class.write`."""
+    def __init__(
+            self,
+            tqdm_class=std_tqdm  # type: Type[std_tqdm]
+    ):
+        super().__init__()
+        self.tqdm_class = tqdm_class
+
+    def emit(self, record):
+        try:
+            msg = self.format(record)
+            # route through tqdm so active bars are not garbled
+            self.tqdm_class.write(msg, file=self.stream)
+            self.flush()
+        except (KeyboardInterrupt, SystemExit):
+            raise
+        except:  # noqa pylint: disable=bare-except
+            self.handleError(record)
+
+
+def _is_console_logging_handler(handler):
+    # True for StreamHandlers bound to the interactive console streams
+    return (isinstance(handler, logging.StreamHandler)
+            and handler.stream in {sys.stdout, sys.stderr})
+
+
+def _get_first_found_console_logging_handler(handlers):
+    # returns the first console handler, or None implicitly
+    for handler in handlers:
+        if _is_console_logging_handler(handler):
+            return handler
+
+
+@contextmanager
+def logging_redirect_tqdm(
+        loggers=None,  # type: Optional[List[logging.Logger]],
+        tqdm_class=std_tqdm  # type: Type[std_tqdm]
+):
+    # type: (...) -> Iterator[None]
+    """
+    Context manager redirecting console logging to `tqdm.write()`, leaving
+    other logging handlers (e.g. log files) unaffected.
+
+    Parameters
+    ----------
+    loggers  : list, optional
+      Which handlers to redirect (default: [logging.root]).
+    tqdm_class  : optional
+
+    Example
+    -------
+    ```python
+    import logging
+    from tqdm import trange
+    from tqdm.contrib.logging import logging_redirect_tqdm
+
+    LOG = logging.getLogger(__name__)
+
+    if __name__ == '__main__':
+        logging.basicConfig(level=logging.INFO)
+        with logging_redirect_tqdm():
+            for i in trange(9):
+                if i == 4:
+                    LOG.info("console logging redirected to `tqdm.write()`")
+        # logging restored
+    ```
+    """
+    if loggers is None:
+        loggers = [logging.root]
+    # remember every logger's handler list so it can be restored on exit
+    original_handlers_list = [logger.handlers for logger in loggers]
+    try:
+        for logger in loggers:
+            tqdm_handler = _TqdmLoggingHandler(tqdm_class)
+            orig_handler = _get_first_found_console_logging_handler(logger.handlers)
+            if orig_handler is not None:
+                # inherit formatting and target stream from the replaced handler
+                tqdm_handler.setFormatter(orig_handler.formatter)
+                tqdm_handler.stream = orig_handler.stream
+            logger.handlers = [
+                handler for handler in logger.handlers
+                if not _is_console_logging_handler(handler)] + [tqdm_handler]
+        yield
+    finally:
+        for logger, original_handlers in zip(loggers, original_handlers_list):
+            logger.handlers = original_handlers
+
+
+@contextmanager
+def tqdm_logging_redirect(
+        *args,
+        # loggers=None,  # type: Optional[List[logging.Logger]]
+        # tqdm=None,  # type: Optional[Type[tqdm.tqdm]]
+        **kwargs
+):
+    # type: (...) -> Iterator[None]
+    """
+    Convenience shortcut for:
+    ```python
+    with tqdm_class(*args, **tqdm_kwargs) as pbar:
+        with logging_redirect_tqdm(loggers=loggers, tqdm_class=tqdm_class):
+            yield pbar
+    ```
+
+    Parameters
+    ----------
+    tqdm_class  : optional, (default: tqdm.std.tqdm).
+    loggers  : optional, list.
+    **tqdm_kwargs  : passed to `tqdm_class`.
+    """
+    # separate our own keywords from those forwarded to `tqdm_class`
+    tqdm_kwargs = kwargs.copy()
+    loggers = tqdm_kwargs.pop('loggers', None)
+    tqdm_class = tqdm_kwargs.pop('tqdm_class', std_tqdm)
+    with tqdm_class(*args, **tqdm_kwargs) as pbar:
+        with logging_redirect_tqdm(loggers=loggers, tqdm_class=tqdm_class):
+            yield pbar
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/slack.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/slack.py"
new file mode 100644
index 0000000..9bca8ee
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/slack.py"
@@ -0,0 +1,120 @@
+"""
+Sends updates to a Slack app.
+
+Usage:
+>>> from tqdm.contrib.slack import tqdm, trange
+>>> for i in trange(10, token='{token}', channel='{channel}'):
+... ...
+
+
+"""
+import logging
+from os import getenv
+
+try:
+ from slack_sdk import WebClient
+except ImportError:
+ raise ImportError("Please `pip install slack-sdk`")
+
+from ..auto import tqdm as tqdm_auto
+from .utils_worker import MonoWorker
+
+__author__ = {"github.com/": ["0x2b3bfa0", "casperdcl"]}
+__all__ = ['SlackIO', 'tqdm_slack', 'tsrange', 'tqdm', 'trange']
+
+
+class SlackIO(MonoWorker):
+    """Non-blocking file-like IO using a Slack app."""
+    def __init__(self, token, channel):
+        """Creates a new message in the given `channel`."""
+        super().__init__()
+        self.client = WebClient(token=token)
+        self.text = self.__class__.__name__
+        # `message` stays None when posting fails; `write` checks for this
+        try:
+            self.message = self.client.chat_postMessage(channel=channel, text=self.text)
+        except Exception as e:
+            tqdm_auto.write(str(e))
+            self.message = None
+
+    def write(self, s):
+        """Replaces internal `message`'s text with `s`."""
+        if not s:
+            s = "..."
+        s = s.replace('\r', '').strip()
+        if s == self.text:
+            return  # skip duplicate message
+        message = self.message
+        if message is None:
+            return
+        self.text = s
+        try:
+            future = self.submit(self.client.chat_update, channel=message['channel'],
+                                 ts=message['ts'], text='`' + s + '`')
+        except Exception as e:
+            tqdm_auto.write(str(e))
+        else:
+            return future
+
+
+class tqdm_slack(tqdm_auto):
+    """
+    Standard `tqdm.auto.tqdm` but also sends updates to a Slack app.
+    May take a few seconds to create (`__init__`).
+
+    - create a Slack app with the `chat:write` scope & invite it to a
+      channel: <https://api.slack.com/authentication/basics>
+    - copy the bot `{token}` & `{channel}` and paste below
+    >>> from tqdm.contrib.slack import tqdm, trange
+    >>> for i in tqdm(iterable, token='{token}', channel='{channel}'):
+    ...     ...
+    """
+    def __init__(self, *args, **kwargs):
+        """
+        Parameters
+        ----------
+        token  : str, required. Slack token
+            [default: ${TQDM_SLACK_TOKEN}].
+        channel  : int, required. Slack channel
+            [default: ${TQDM_SLACK_CHANNEL}].
+        mininterval  : float, optional.
+          Minimum of [default: 1.5] to avoid rate limit.
+
+        See `tqdm.auto.tqdm.__init__` for other parameters.
+        """
+        if not kwargs.get('disable'):
+            kwargs = kwargs.copy()
+            logging.getLogger("HTTPClient").setLevel(logging.WARNING)
+            self.sio = SlackIO(
+                kwargs.pop('token', getenv("TQDM_SLACK_TOKEN")),
+                kwargs.pop('channel', getenv("TQDM_SLACK_CHANNEL")))
+            # enforce the rate-limit-friendly minimum update interval
+            kwargs['mininterval'] = max(1.5, kwargs.get('mininterval', 1.5))
+        super().__init__(*args, **kwargs)
+
+    def display(self, **kwargs):
+        super().display(**kwargs)
+        fmt = self.format_dict
+        # wrap the bar in code-span backticks so Slack keeps it monospaced
+        if fmt.get('bar_format', None):
+            fmt['bar_format'] = fmt['bar_format'].replace(
+                '<bar/>', '`{bar:10}`').replace('{bar}', '`{bar:10u}`')
+        else:
+            fmt['bar_format'] = '{l_bar}`{bar:10}`{r_bar}'
+        if fmt['ascii'] is False:
+            # substitute Slack emoji for the unicode block characters
+            fmt['ascii'] = [":black_square:", ":small_blue_diamond:", ":large_blue_diamond:",
+                            ":large_blue_square:"]
+        fmt['ncols'] = 336
+        self.sio.write(self.format_meter(**fmt))
+
+    def clear(self, *args, **kwargs):
+        super().clear(*args, **kwargs)
+        if not self.disable:
+            self.sio.write("")
+
+
+def tsrange(*args, **kwargs):
+    """Shortcut for `tqdm.contrib.slack.tqdm(range(*args), **kwargs)`."""
+    return tqdm_slack(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_slack
+trange = tsrange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/telegram.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/telegram.py"
new file mode 100644
index 0000000..0191518
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/telegram.py"
@@ -0,0 +1,153 @@
+"""
+Sends updates to a Telegram bot.
+
+Usage:
+>>> from tqdm.contrib.telegram import tqdm, trange
+>>> for i in trange(10, token='{token}', chat_id='{chat_id}'):
+... ...
+
+
+"""
+from os import getenv
+from warnings import warn
+
+from requests import Session
+
+from ..auto import tqdm as tqdm_auto
+from ..std import TqdmWarning
+from .utils_worker import MonoWorker
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['TelegramIO', 'tqdm_telegram', 'ttgrange', 'tqdm', 'trange']
+
+
class TelegramIO(MonoWorker):
    """Non-blocking file-like IO using a Telegram Bot."""
    # Base API URL; the bot token is interpolated per request.
    API = 'https://api.telegram.org/bot'

    def __init__(self, token, chat_id):
        """Creates a new message in the given `chat_id`."""
        super().__init__()
        self.token = token
        self.chat_id = chat_id
        self.session = Session()
        # last text pushed; seeded with the class name as placeholder
        self.text = self.__class__.__name__
        self.message_id  # eagerly trigger creation of the placeholder message

    @property
    def message_id(self):
        """Lazily send the initial message and cache its id.

        Returns `None` (implicitly) when creation fails or is rate-limited.
        """
        if hasattr(self, '_message_id'):
            return self._message_id
        try:
            res = self.session.post(
                self.API + '%s/sendMessage' % self.token,
                data={'text': '`' + self.text + '`', 'chat_id': self.chat_id,
                      'parse_mode': 'MarkdownV2'}).json()
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            if res.get('error_code') == 429:
                warn("Creation rate limit: try increasing `mininterval`.",
                     TqdmWarning, stacklevel=2)
            else:
                self._message_id = res['result']['message_id']
                return self._message_id

    def write(self, s):
        """Replaces internal `message_id`'s text with `s`."""
        if not s:
            s = "..."
        s = s.replace('\r', '').strip()
        if s == self.text:
            return  # avoid duplicate message Bot error
        message_id = self.message_id
        if message_id is None:
            # initial message could not be created; nothing to edit
            return
        self.text = s
        try:
            future = self.submit(
                self.session.post, self.API + '%s/editMessageText' % self.token,
                data={'text': '`' + s + '`', 'chat_id': self.chat_id,
                      'message_id': message_id, 'parse_mode': 'MarkdownV2'})
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            return future

    def delete(self):
        """Deletes internal `message_id`."""
        try:
            future = self.submit(
                self.session.post, self.API + '%s/deleteMessage' % self.token,
                data={'chat_id': self.chat_id, 'message_id': self.message_id})
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            return future
+
+
class tqdm_telegram(tqdm_auto):
    """
    Standard `tqdm.auto.tqdm` but also sends updates to a Telegram Bot.
    May take a few seconds to create (`__init__`).

    - create a bot <https://core.telegram.org/bots#6-botfather>
    - copy its `{token}`
    - add the bot to a chat and send it a message such as `/start`
    - go to <https://api.telegram.org/bot`{token}`/getUpdates> to find out
      the `{chat_id}`
    - paste the `{token}` & `{chat_id}` below

    >>> from tqdm.contrib.telegram import tqdm, trange
    >>> for i in tqdm(iterable, token='{token}', chat_id='{chat_id}'):
    ...     ...
    """
    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------
        token : str, required. Telegram token
            [default: ${TQDM_TELEGRAM_TOKEN}].
        chat_id : str, required. Telegram chat ID
            [default: ${TQDM_TELEGRAM_CHAT_ID}].

        See `tqdm.auto.tqdm.__init__` for other parameters.
        """
        if not kwargs.get('disable'):
            kwargs = kwargs.copy()
            self.tgio = TelegramIO(
                kwargs.pop('token', getenv('TQDM_TELEGRAM_TOKEN')),
                kwargs.pop('chat_id', getenv('TQDM_TELEGRAM_CHAT_ID')))
        super().__init__(*args, **kwargs)

    def display(self, **kwargs):
        """Refresh the native bar and mirror the meter text to Telegram."""
        super().display(**kwargs)
        fmt = self.format_dict
        if fmt.get('bar_format', None):
            fmt['bar_format'] = fmt['bar_format'].replace(
                '<bar/>', '{bar:10u}').replace('{bar}', '{bar:10u}')
        else:
            fmt['bar_format'] = '{l_bar}{bar:10u}{r_bar}'
        self.tgio.write(self.format_meter(**fmt))

    def clear(self, *args, **kwargs):
        """Clear the native bar; blank the Telegram message too."""
        super().clear(*args, **kwargs)
        if not self.disable:
            self.tgio.write("")

    def close(self):
        """Close the bar; delete the Telegram message unless leaving it."""
        if self.disable:
            return
        super().close()
        if not (self.leave or (self.leave is None and self.pos == 0)):
            self.tgio.delete()
+
+
def ttgrange(*args, **kwargs):
    """Shortcut for `tqdm.contrib.telegram.tqdm(range(*args), **kwargs)`."""
    return tqdm_telegram(range(*args), **kwargs)


# Aliases: module-level names so `from tqdm.contrib.telegram import tqdm, trange`
# mirrors the top-level `tqdm`/`trange` API.
tqdm = tqdm_telegram
trange = ttgrange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/utils_worker.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/utils_worker.py"
new file mode 100644
index 0000000..2a03a2a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/contrib/utils_worker.py"
@@ -0,0 +1,38 @@
+"""
+IO/concurrency helpers for `tqdm.contrib`.
+"""
+from collections import deque
+from concurrent.futures import ThreadPoolExecutor
+
+from ..auto import tqdm as tqdm_auto
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['MonoWorker']
+
+
class MonoWorker(object):
    """
    Single-threaded task runner keeping at most one running and one
    waiting task; a newer submission replaces the waiting one.
    """
    def __init__(self):
        # one worker thread; the deque holds at most [running, waiting]
        self.pool = ThreadPoolExecutor(max_workers=1)
        self.futures = deque([], 2)

    def submit(self, func, *args, **kwargs):
        """`func(*args, **kwargs)` may replace currently waiting task."""
        queue = self.futures
        if len(queue) == queue.maxlen:
            oldest = queue.popleft()
            if not oldest.done():
                # still running: discard any queued-but-unstarted task
                if queue:
                    queue.pop().cancel()
                queue.appendleft(oldest)  # keep tracking the running one
        try:
            fut = self.pool.submit(func, *args, **kwargs)
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            queue.append(fut)
            return fut
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/dask.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/dask.py"
new file mode 100644
index 0000000..57f1b66
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/dask.py"
@@ -0,0 +1,44 @@
+from functools import partial
+
+from dask.callbacks import Callback
+
+from .auto import tqdm as tqdm_auto
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['TqdmCallback']
+
+
class TqdmCallback(Callback):
    """Dask callback for task progress."""
    def __init__(self, start=None, pretask=None, tqdm_class=tqdm_auto,
                 **tqdm_kwargs):
        """
        Parameters
        ----------
        tqdm_class : optional
            `tqdm` class to use for bars [default: `tqdm.auto.tqdm`].
        tqdm_kwargs : optional
            Any other arguments used for all bars.
        """
        super().__init__(start=start, pretask=pretask)
        if tqdm_kwargs:
            tqdm_class = partial(tqdm_class, **tqdm_kwargs)
        self.tqdm_class = tqdm_class

    def _start_state(self, _, state):
        # total = every task the scheduler knows about at start
        self.pbar = self.tqdm_class(total=sum(
            len(state[k]) for k in ['ready', 'waiting', 'running', 'finished']))

    def _posttask(self, *_, **__):
        self.pbar.update()

    def _finish(self, *_, **__):
        self.pbar.close()

    def display(self):
        """Displays in the current cell in Notebooks."""
        # bugfix: the bar attribute is `pbar` (set in `_start_state`);
        # reading `self.bar` always raised AttributeError. Also guard
        # against being called before the computation has started.
        pbar = getattr(self, 'pbar', None)
        container = getattr(pbar, 'container', None)
        if container is None:
            return
        from .notebook import display
        display(container)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/gui.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/gui.py"
new file mode 100644
index 0000000..cb52fb9
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/gui.py"
@@ -0,0 +1,179 @@
+"""
+Matplotlib GUI progressbar decorator for iterators.
+
+Usage:
+>>> from tqdm.gui import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+# future division is important to divide integers and get as
+# a result precise floating numbers (instead of truncated int)
+import re
+from warnings import warn
+
+# to inherit from the tqdm class
+from .std import TqdmExperimentalWarning
+from .std import tqdm as std_tqdm
+
+# import compatibility functions and utilities
+
+__author__ = {"github.com/": ["casperdcl", "lrq3000"]}
+__all__ = ['tqdm_gui', 'tgrange', 'tqdm', 'trange']
+
+
class tqdm_gui(std_tqdm):  # pragma: no cover
    """Experimental Matplotlib GUI version of tqdm!"""
    # TODO: @classmethod: write() on GUI?
    def __init__(self, *args, **kwargs):
        from collections import deque

        import matplotlib as mpl
        import matplotlib.pyplot as plt
        kwargs = kwargs.copy()
        kwargs['gui'] = True
        colour = kwargs.pop('colour', 'g')
        super().__init__(*args, **kwargs)

        if self.disable:
            return

        warn("GUI is experimental/alpha", TqdmExperimentalWarning, stacklevel=2)
        self.mpl = mpl
        self.plt = plt

        # Remember if external environment uses toolbars
        self.toolbar = self.mpl.rcParams['toolbar']
        self.mpl.rcParams['toolbar'] = 'None'

        # matplotlib redraws are expensive; cap the refresh rate
        self.mininterval = max(self.mininterval, 0.5)
        self.fig, ax = plt.subplots(figsize=(9, 2.2))
        # self.fig.subplots_adjust(bottom=0.2)
        total = self.__len__()  # avoids TypeError on None #971
        if total is not None:
            # known total: unbounded lists indexed by percent
            self.xdata = []
            self.ydata = []
            self.zdata = []
        else:
            # unknown total: rolling window of recent samples
            self.xdata = deque([])
            self.ydata = deque([])
            self.zdata = deque([])
        self.line1, = ax.plot(self.xdata, self.ydata, color='b')
        self.line2, = ax.plot(self.xdata, self.zdata, color='k')
        ax.set_ylim(0, 0.001)
        if total is not None:
            ax.set_xlim(0, 100)
            ax.set_xlabel("percent")
            self.fig.legend((self.line1, self.line2), ("cur", "est"),
                            loc='center right')
            # progressbar
            self.hspan = plt.axhspan(0, 0.001, xmin=0, xmax=0, color=colour)
        else:
            # ax.set_xlim(-60, 0)
            ax.set_xlim(0, 60)
            ax.invert_xaxis()
            ax.set_xlabel("seconds")
            ax.legend(("cur", "est"), loc='lower left')
        ax.grid()
        # ax.set_xlabel('seconds')
        ax.set_ylabel((self.unit if self.unit else "it") + "/s")
        if self.unit_scale:
            plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
            ax.yaxis.get_offset_text().set_x(-0.15)

        # Remember if external environment is interactive
        self.wasion = plt.isinteractive()
        plt.ion()
        self.ax = ax

    def close(self):
        """Deregister the bar and restore the matplotlib environment."""
        if self.disable:
            return

        self.disable = True

        with self.get_lock():
            self._instances.remove(self)

        # Restore toolbars
        self.mpl.rcParams['toolbar'] = self.toolbar
        # Return to non-interactive mode
        if not self.wasion:
            self.plt.ioff()
        if self.leave:
            self.display()
        else:
            self.plt.close(self.fig)

    def clear(self, *_, **__):
        # no-op: the plot is redrawn wholesale in display()
        pass

    def display(self, *_, **__):
        """Redraw the rate plot and progress span from current counters."""
        n = self.n
        cur_t = self._time()
        elapsed = cur_t - self.start_t
        delta_it = n - self.last_print_n
        delta_t = cur_t - self.last_print_t

        # Inline due to multiple calls
        total = self.total
        xdata = self.xdata
        ydata = self.ydata
        zdata = self.zdata
        ax = self.ax
        line1 = self.line1
        line2 = self.line2
        hspan = getattr(self, 'hspan', None)
        # instantaneous rate
        y = delta_it / delta_t
        # overall rate
        z = n / elapsed
        # update line data
        xdata.append(n * 100.0 / total if total else cur_t)
        ydata.append(y)
        zdata.append(z)

        # Discard old values
        # xmin, xmax = ax.get_xlim()
        # if (not total) and elapsed > xmin * 1.1:
        if (not total) and elapsed > 66:
            xdata.popleft()
            ydata.popleft()
            zdata.popleft()

        ymin, ymax = ax.get_ylim()
        if y > ymax or z > ymax:
            ymax = 1.1 * y
            ax.set_ylim(ymin, ymax)
            ax.figure.canvas.draw()

        if total:
            line1.set_data(xdata, ydata)
            line2.set_data(xdata, zdata)
            if hspan:
                hspan.set_xy((0, ymin))
                hspan.set_height(ymax - ymin)
                hspan.set_width(n / total)
        else:
            t_ago = [cur_t - i for i in xdata]
            line1.set_data(t_ago, ydata)
            line2.set_data(t_ago, zdata)

        d = self.format_dict
        # remove {bar}
        d['bar_format'] = (d['bar_format'] or "{l_bar}<bar/>{r_bar}").replace(
            "{bar}", "<bar/>")
        msg = self.format_meter(**d)
        if '<bar/>' in msg:
            msg = "".join(re.split(r'\|?<bar/>\|?', msg, maxsplit=1))
        ax.set_title(msg, fontname="DejaVu Sans Mono", fontsize=11)
        self.plt.pause(1e-9)
+
+
def tgrange(*args, **kwargs):
    """Shortcut for `tqdm.gui.tqdm(range(*args), **kwargs)`."""
    return tqdm_gui(range(*args), **kwargs)


# Aliases: module-level names so `from tqdm.gui import tqdm, trange`
# mirrors the top-level `tqdm`/`trange` API.
tqdm = tqdm_gui
trange = tgrange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/keras.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/keras.py"
new file mode 100644
index 0000000..cce9467
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/keras.py"
@@ -0,0 +1,122 @@
+from copy import copy
+from functools import partial
+
+from .auto import tqdm as tqdm_auto
+
+try:
+ import keras
+except (ImportError, AttributeError) as e:
+ try:
+ from tensorflow import keras
+ except ImportError:
+ raise e
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['TqdmCallback']
+
+
class TqdmCallback(keras.callbacks.Callback):
    """Keras callback for epoch and batch progress."""
    @staticmethod
    def bar2callback(bar, pop=None, delta=(lambda logs: 1)):
        # Build a Keras-style `(event, logs)` callback that advances `bar`
        # by `delta(logs)` and shows the remaining `logs` as the postfix
        # (with keys in `pop` removed first).
        def callback(_, logs=None):
            n = delta(logs)
            if logs:
                if pop:
                    logs = copy(logs)
                    [logs.pop(i, 0) for i in pop]
                bar.set_postfix(logs, refresh=False)
            bar.update(n)

        return callback

    def __init__(self, epochs=None, data_size=None, batch_size=None, verbose=1,
                 tqdm_class=tqdm_auto, **tqdm_kwargs):
        """
        Parameters
        ----------
        epochs : int, optional
        data_size : int, optional
            Number of training pairs.
        batch_size : int, optional
            Number of training pairs per batch.
        verbose : int
            0: epoch, 1: batch (transient), 2: batch. [default: 1].
            Will be set to `0` unless both `data_size` and `batch_size`
            are given.
        tqdm_class : optional
            `tqdm` class to use for bars [default: `tqdm.auto.tqdm`].
        tqdm_kwargs : optional
            Any other arguments used for all bars.
        """
        if tqdm_kwargs:
            tqdm_class = partial(tqdm_class, **tqdm_kwargs)
        self.tqdm_class = tqdm_class
        self.epoch_bar = tqdm_class(total=epochs, unit='epoch')
        self.on_epoch_end = self.bar2callback(self.epoch_bar)
        if data_size and batch_size:
            # ceil-divide: last partial batch still counts
            self.batches = batches = (data_size + batch_size - 1) // batch_size
        else:
            self.batches = batches = None
        self.verbose = verbose
        if verbose == 1:
            self.batch_bar = tqdm_class(total=batches, unit='batch', leave=False)
            self.on_batch_end = self.bar2callback(
                self.batch_bar, pop=['batch', 'size'],
                delta=lambda logs: logs.get('size', 1))

    def on_train_begin(self, *_, **__):
        # pick up the true epoch count from Keras params if it differs
        params = self.params.get
        auto_total = params('epochs', params('nb_epoch', None))
        if auto_total is not None and auto_total != self.epoch_bar.total:
            self.epoch_bar.reset(total=auto_total)

    def on_epoch_begin(self, epoch, *_, **__):
        if self.epoch_bar.n < epoch:
            # fast-forward, e.g. when resuming via `initial_epoch`
            ebar = self.epoch_bar
            ebar.n = ebar.last_print_n = ebar.initial = epoch
        if self.verbose:
            params = self.params.get
            total = params('samples', params(
                'nb_sample', params('steps', None))) or self.batches
            if self.verbose == 2:
                # persistent per-epoch batch bar: recreate each epoch
                if hasattr(self, 'batch_bar'):
                    self.batch_bar.close()
                self.batch_bar = self.tqdm_class(
                    total=total, unit='batch', leave=True,
                    unit_scale=1 / (params('batch_size', 1) or 1))
                self.on_batch_end = self.bar2callback(
                    self.batch_bar, pop=['batch', 'size'],
                    delta=lambda logs: logs.get('size', 1))
            elif self.verbose == 1:
                # transient batch bar: just rewind it
                self.batch_bar.unit_scale = 1 / (params('batch_size', 1) or 1)
                self.batch_bar.reset(total=total)
            else:
                raise KeyError('Unknown verbosity')

    def on_train_end(self, *_, **__):
        if hasattr(self, 'batch_bar'):
            self.batch_bar.close()
        self.epoch_bar.close()

    def display(self):
        """Displays in the current cell in Notebooks."""
        container = getattr(self.epoch_bar, 'container', None)
        if container is None:
            return
        from .notebook import display
        display(container)
        batch_bar = getattr(self, 'batch_bar', None)
        if batch_bar is not None:
            display(batch_bar.container)

    @staticmethod
    def _implements_train_batch_hooks():
        return True

    @staticmethod
    def _implements_test_batch_hooks():
        return True

    @staticmethod
    def _implements_predict_batch_hooks():
        return True
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/notebook.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/notebook.py"
new file mode 100644
index 0000000..77b91bd
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/notebook.py"
@@ -0,0 +1,317 @@
+"""
+IPython/Jupyter Notebook progressbar decorator for iterators.
+Includes a default `range` iterator printing to `stderr`.
+
+Usage:
+>>> from tqdm.notebook import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+# import compatibility functions and utilities
+import re
+import sys
+from html import escape
+from weakref import proxy
+
+# to inherit from the tqdm class
+from .std import tqdm as std_tqdm
+
+if True: # pragma: no cover
+ # import IPython/Jupyter base widget and display utilities
+ IPY = 0
+ try: # IPython 4.x
+ import ipywidgets
+ IPY = 4
+ except ImportError: # IPython 3.x / 2.x
+ IPY = 32
+ import warnings
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ 'ignore', message=".*The `IPython.html` package has been deprecated.*")
+ try:
+ import IPython.html.widgets as ipywidgets # NOQA: F401
+ except ImportError:
+ pass
+
+ try: # IPython 4.x / 3.x
+ if IPY == 32:
+ from IPython.html.widgets import HTML
+ from IPython.html.widgets import FloatProgress as IProgress
+ from IPython.html.widgets import HBox
+ IPY = 3
+ else:
+ from ipywidgets import HTML
+ from ipywidgets import FloatProgress as IProgress
+ from ipywidgets import HBox
+ except ImportError:
+ try: # IPython 2.x
+ from IPython.html.widgets import HTML
+ from IPython.html.widgets import ContainerWidget as HBox
+ from IPython.html.widgets import FloatProgressWidget as IProgress
+ IPY = 2
+ except ImportError:
+ IPY = 0
+ IProgress = None
+ HBox = object
+
+ try:
+ from IPython.display import display # , clear_output
+ except ImportError:
+ pass
+
+__author__ = {"github.com/": ["lrq3000", "casperdcl", "alexanderkuk"]}
+__all__ = ['tqdm_notebook', 'tnrange', 'tqdm', 'trange']
+WARN_NOIPYW = ("IProgress not found. Please update jupyter and ipywidgets."
+ " See https://ipywidgets.readthedocs.io/en/stable"
+ "/user_install.html")
+
+
class TqdmHBox(HBox):
    """`ipywidgets.HBox` with a pretty representation"""
    def _json_(self, pretty=None):
        # format_dict of the attached bar; empty when no bar is attached
        owner = getattr(self, 'pbar', None)
        if owner is None:
            return {}
        fmt = owner.format_dict
        if pretty is not None:
            fmt["ascii"] = not pretty
        return fmt

    def __repr__(self, pretty=False):
        owner = getattr(self, 'pbar', None)
        if owner is None:
            return super().__repr__()
        return owner.format_meter(**self._json_(pretty))

    def _repr_pretty_(self, pp, *_, **__):
        pp.text(self.__repr__(True))
+
+
class tqdm_notebook(std_tqdm):
    """
    Experimental IPython/Jupyter Notebook widget using tqdm!
    """
    @staticmethod
    def status_printer(_, total=None, desc=None, ncols=None):
        """
        Manage the printing of an IPython/Jupyter Notebook progress bar widget.
        """
        # Fallback to text bar if there's no total
        # DEPRECATED: replaced with an 'info' style bar
        # if not total:
        #     return super(tqdm_notebook, tqdm_notebook).status_printer(file)

        # fp = file

        # Prepare IPython progress bar
        if IProgress is None:  # #187 #451 #558 #872
            raise ImportError(WARN_NOIPYW)
        if total:
            pbar = IProgress(min=0, max=total)
        else:  # No total? Show info style bar with no progress tqdm status
            pbar = IProgress(min=0, max=1)
            pbar.value = 1
            pbar.bar_style = 'info'
            if ncols is None:
                pbar.layout.width = "20px"

        ltext = HTML()
        rtext = HTML()
        if desc:
            ltext.value = desc
        container = TqdmHBox(children=[ltext, pbar, rtext])
        # Prepare layout
        if ncols is not None:  # use default style of ipywidgets
            # ncols could be 100, "100px", "100%"
            ncols = str(ncols)  # ipywidgets only accepts string
            try:
                if int(ncols) > 0:  # isnumeric and positive
                    ncols += 'px'
            except ValueError:
                pass
            pbar.layout.flex = '2'
            container.layout.width = ncols
            container.layout.display = 'inline-flex'
            container.layout.flex_flow = 'row wrap'

        return container

    def display(self, msg=None, pos=None,
                # additional signals
                close=False, bar_style=None, check_delay=True):
        """Update the widget's text/value and apply any close/style signal."""
        # Note: contrary to native tqdm, msg='' does NOT clear bar
        # goal is to keep all infos if error happens so user knows
        # at which iteration the loop failed.

        # Clear previous output (really necessary?)
        # clear_output(wait=1)

        if not msg and not close:
            d = self.format_dict
            # remove {bar}
            d['bar_format'] = (d['bar_format'] or "{l_bar}<bar/>{r_bar}").replace(
                "{bar}", "<bar/>")
            msg = self.format_meter(**d)

        ltext, pbar, rtext = self.container.children
        pbar.value = self.n

        if msg:
            msg = msg.replace(' ', u'\u2007')  # fix html space padding
            # html escape special characters (like '&')
            if '<bar/>' in msg:
                left, right = map(escape, re.split(r'\|?<bar/>\|?', msg, maxsplit=1))
            else:
                left, right = '', escape(msg)

            # Update description
            ltext.value = left
            # never clear the bar (signal: msg='')
            if right:
                rtext.value = right

        # Change bar style
        if bar_style:
            # Hack-ish way to avoid the danger bar_style being overridden by
            # success because the bar gets closed after the error...
            if pbar.bar_style != 'danger' or bar_style != 'success':
                pbar.bar_style = bar_style

        # Special signal to close the bar
        if close and pbar.bar_style != 'danger':  # hide only if no error
            try:
                self.container.close()
            except AttributeError:
                self.container.visible = False
            self.container.layout.visibility = 'hidden'  # IPYW>=8

        if check_delay and self.delay > 0 and not self.displayed:
            # delayed first display: show only once delay has elapsed
            display(self.container)
            self.displayed = True

    @property
    def colour(self):
        # the IProgress widget is the second-to-last child of the container
        if hasattr(self, 'container'):
            return self.container.children[-2].style.bar_color

    @colour.setter
    def colour(self, bar_color):
        if hasattr(self, 'container'):
            self.container.children[-2].style.bar_color = bar_color

    def __init__(self, *args, **kwargs):
        """
        Supports the usual `tqdm.tqdm` parameters as well as those listed below.

        Parameters
        ----------
        display : Whether to call `display(self.container)` immediately
            [default: True].
        """
        kwargs = kwargs.copy()
        # Setup default output
        file_kwarg = kwargs.get('file', sys.stderr)
        if file_kwarg is sys.stderr or file_kwarg is None:
            kwargs['file'] = sys.stdout  # avoid the red block in IPython

        # Initialize parent class + avoid printing by using gui=True
        kwargs['gui'] = True
        # convert disable = None to False
        kwargs['disable'] = bool(kwargs.get('disable', False))
        colour = kwargs.pop('colour', None)
        display_here = kwargs.pop('display', True)
        super().__init__(*args, **kwargs)
        if self.disable or not kwargs['gui']:
            self.disp = lambda *_, **__: None
            return

        # Get bar width
        self.ncols = '100%' if self.dynamic_ncols else kwargs.get("ncols", None)

        # Replace with IPython progress bar display (with correct total)
        unit_scale = 1 if self.unit_scale is True else self.unit_scale or 1
        total = self.total * unit_scale if self.total else self.total
        self.container = self.status_printer(self.fp, total, self.desc, self.ncols)
        # weak proxy avoids a reference cycle between bar and container
        self.container.pbar = proxy(self)
        self.displayed = False
        if display_here and self.delay <= 0:
            display(self.container)
            self.displayed = True
        self.disp = self.display
        self.colour = colour

        # Print initial bar state
        if not self.disable:
            self.display(check_delay=False)

    def __iter__(self):
        try:
            it = super().__iter__()
            for obj in it:
                # return super(tqdm...) will not catch exception
                yield obj
        # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
        except:  # NOQA
            self.disp(bar_style='danger')
            raise
        # NB: don't `finally: close()`
        # since this could be a shared bar which the user will `reset()`

    def update(self, n=1):
        try:
            return super().update(n=n)
        # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
        except:  # NOQA
            # cannot catch KeyboardInterrupt when using manual tqdm
            # as the interrupt will most likely happen on another statement
            self.disp(bar_style='danger')
            raise
        # NB: don't `finally: close()`
        # since this could be a shared bar which the user will `reset()`

    def close(self):
        if self.disable:
            return
        super().close()
        # Try to detect if there was an error or KeyboardInterrupt
        # in manual mode: if n < total, things probably got wrong
        if self.total and self.n < self.total:
            self.disp(bar_style='danger', check_delay=False)
        else:
            if self.leave:
                self.disp(bar_style='success', check_delay=False)
            else:
                self.disp(close=True, check_delay=False)

    def clear(self, *_, **__):
        # no-op: widget output is replaced in-place, never cleared
        pass

    def reset(self, total=None):
        """
        Resets to 0 iterations for repeated use.

        Consider combining with `leave=True`.

        Parameters
        ----------
        total : int or float, optional. Total to use for the new bar.
        """
        if self.disable:
            return super().reset(total=total)
        _, pbar, _ = self.container.children
        pbar.bar_style = ''
        if total is not None:
            pbar.max = total
            if not self.total and self.ncols is None:  # no longer unknown total
                pbar.layout.width = None  # reset width
        return super().reset(total=total)
+
+
def tnrange(*args, **kwargs):
    """Shortcut for `tqdm.notebook.tqdm(range(*args), **kwargs)`."""
    return tqdm_notebook(range(*args), **kwargs)


# Aliases: module-level names so `from tqdm.notebook import tqdm, trange`
# mirrors the top-level `tqdm`/`trange` API.
tqdm = tqdm_notebook
trange = tnrange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/rich.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/rich.py"
new file mode 100644
index 0000000..3d392ed
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/rich.py"
@@ -0,0 +1,151 @@
+"""
+`rich.progress` decorator for iterators.
+
+Usage:
+>>> from tqdm.rich import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+from warnings import warn
+
+from rich.progress import (
+ BarColumn, Progress, ProgressColumn, Text, TimeElapsedColumn, TimeRemainingColumn, filesize)
+
+from .std import TqdmExperimentalWarning
+from .std import tqdm as std_tqdm
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['tqdm_rich', 'trrange', 'tqdm', 'trange']
+
+
class FractionColumn(ProgressColumn):
    """Renders completed/total, e.g. '0.5/2.3 G'."""
    def __init__(self, unit_scale=False, unit_divisor=1000):
        self.unit_scale = unit_scale
        self.unit_divisor = unit_divisor
        super().__init__()

    def render(self, task):
        """Calculate common unit for completed and total."""
        done = int(task.completed)
        total = int(task.total)
        # choose one common unit (based on the total) for both numbers
        if self.unit_scale:
            prefixes = ["", "K", "M", "G", "T", "P", "E", "Z", "Y"]
            divisor = self.unit_divisor
        else:
            prefixes = [""]
            divisor = 1
        unit, suffix = filesize.pick_unit_and_suffix(total, prefixes, divisor)
        precision = 1 if unit != 1 else 0
        text = f"{done/unit:,.{precision}f}/{total/unit:,.{precision}f} {suffix}"
        return Text(text, style="progress.download")
+
+
class RateColumn(ProgressColumn):
    """Renders human readable transfer speed."""
    def __init__(self, unit="", unit_scale=False, unit_divisor=1000):
        self.unit = unit
        self.unit_scale = unit_scale
        self.unit_divisor = unit_divisor
        super().__init__()

    def render(self, task):
        """Show data transfer speed."""
        speed = task.speed
        if speed is None:
            # no samples yet
            return Text(f"? {self.unit}/s", style="progress.data.speed")
        if self.unit_scale:
            prefixes = ["", "K", "M", "G", "T", "P", "E", "Z", "Y"]
            divisor = self.unit_divisor
        else:
            prefixes = [""]
            divisor = 1
        unit, suffix = filesize.pick_unit_and_suffix(speed, prefixes, divisor)
        precision = 1 if unit != 1 else 0
        return Text(f"{speed/unit:,.{precision}f} {suffix}{self.unit}/s",
                    style="progress.data.speed")
+
+
class tqdm_rich(std_tqdm):  # pragma: no cover
    """Experimental rich.progress GUI version of tqdm!"""
    # TODO: @classmethod: write()?
    def __init__(self, *args, **kwargs):
        """
        This class accepts the following parameters *in addition* to
        the parameters accepted by `tqdm`.

        Parameters
        ----------
        progress : tuple, optional
            arguments for `rich.progress.Progress()`.
        options : dict, optional
            keyword arguments for `rich.progress.Progress()`.
        """
        kwargs = kwargs.copy()
        kwargs['gui'] = True
        # convert disable = None to False
        kwargs['disable'] = bool(kwargs.get('disable', False))
        progress = kwargs.pop('progress', None)
        options = kwargs.pop('options', {}).copy()
        super().__init__(*args, **kwargs)

        if self.disable:
            return

        warn("rich is experimental/alpha", TqdmExperimentalWarning, stacklevel=2)
        d = self.format_dict
        if progress is None:
            # default column layout mimicking tqdm's text meter
            progress = (
                "[progress.description]{task.description}"
                "[progress.percentage]{task.percentage:>4.0f}%",
                BarColumn(bar_width=None),
                FractionColumn(
                    unit_scale=d['unit_scale'], unit_divisor=d['unit_divisor']),
                "[", TimeElapsedColumn(), "<", TimeRemainingColumn(),
                ",", RateColumn(unit=d['unit'], unit_scale=d['unit_scale'],
                                unit_divisor=d['unit_divisor']), "]"
            )
        options.setdefault('transient', not self.leave)
        self._prog = Progress(*progress, **options)
        self._prog.__enter__()
        self._task_id = self._prog.add_task(self.desc or "", **d)

    def close(self):
        if self.disable:
            return
        self.display()  # print 100%, vis #1306
        super().close()
        # leave the Progress context entered in __init__
        self._prog.__exit__(None, None, None)

    def clear(self, *_, **__):
        # no-op: rich manages its own redraw
        pass

    def display(self, *_, **__):
        # guard: _prog only exists when the bar is enabled
        if not hasattr(self, '_prog'):
            return
        self._prog.update(self._task_id, completed=self.n, description=self.desc)

    def reset(self, total=None):
        """
        Resets to 0 iterations for repeated use.

        Parameters
        ----------
        total : int or float, optional. Total to use for the new bar.
        """
        if hasattr(self, '_prog'):
            self._prog.reset(total=total)
        super().reset(total=total)
+
+
def trrange(*args, **kwargs):
    """Shortcut for `tqdm.rich.tqdm(range(*args), **kwargs)`."""
    return tqdm_rich(range(*args), **kwargs)


# Aliases: module-level names so `from tqdm.rich import tqdm, trange`
# mirrors the top-level `tqdm`/`trange` API.
tqdm = tqdm_rich
trange = trrange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/std.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/std.py"
new file mode 100644
index 0000000..e91ad30
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/std.py"
@@ -0,0 +1,1524 @@
+"""
+Customisable progressbar decorator for iterators.
+Includes a default `range` iterator printing to `stderr`.
+
+Usage:
+>>> from tqdm import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+import sys
+from collections import OrderedDict, defaultdict
+from contextlib import contextmanager
+from datetime import datetime, timedelta, timezone
+from numbers import Number
+from time import time
+from warnings import warn
+from weakref import WeakSet
+
+from ._monitor import TMonitor
+from .utils import (
+ CallbackIOWrapper, Comparable, DisableOnWriteError, FormatReplace, SimpleTextIOWrapper,
+ _is_ascii, _screen_shape_wrapper, _supports_unicode, _term_move_up, disp_len, disp_trim,
+ envwrap)
+
+__author__ = "https://github.com/tqdm/tqdm#contributions"
+__all__ = ['tqdm', 'trange',
+ 'TqdmTypeError', 'TqdmKeyError', 'TqdmWarning',
+ 'TqdmExperimentalWarning', 'TqdmDeprecationWarning',
+ 'TqdmMonitorWarning']
+
+
class TqdmTypeError(TypeError):
    """Raised when an argument of an unexpected type is passed to tqdm."""
+
+
class TqdmKeyError(KeyError):
    """Raised when an unknown keyword argument is passed to tqdm."""
+
+
class TqdmWarning(Warning):
    """Base class for all tqdm warnings.

    Used for non-external-code-breaking errors, such as garbled printing.
    """
    def __init__(self, msg, fp_write=None, *a, **k):
        # When a writer is supplied, emit the message directly to it instead
        # of going through the normal `Warning` constructor.
        if fp_write is None:
            super().__init__(msg, *a, **k)
        else:
            fp_write("\n" + self.__class__.__name__ + ": " + str(msg).rstrip() + '\n')
+
+
class TqdmExperimentalWarning(TqdmWarning, FutureWarning):
    """Beta feature: unstable API and behaviour."""
+
+
class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning):
    """Deprecation notice; not suppressed if raised."""
+
+
class TqdmMonitorWarning(TqdmWarning, RuntimeWarning):
    """tqdm monitor errors which do not affect external functionality."""
+
+
def TRLock(*args, **kwargs):
    """Return a `threading.RLock`, or `None` if unavailable on this platform."""
    try:
        from threading import RLock
        return RLock(*args, **kwargs)
    except (ImportError, OSError):  # pragma: no cover
        return None
+
+
class TqdmDefaultWriteLock(object):
    """
    Provide a default write lock for thread and multiprocessing safety.
    Works only on platforms supporting `fork` (so Windows is excluded).
    You must initialise a `tqdm` or `TqdmDefaultWriteLock` instance
    before forking in order for the write lock to work.
    On Windows, you need to supply the lock from the parent to the children as
    an argument to joblib or the parallelism lib you use.
    """
    # global thread lock so no setup required for multithreading.
    # NB: Do not create multiprocessing lock as it sets the multiprocessing
    # context, disallowing `spawn()`/`forkserver()`
    th_lock = TRLock()

    def __init__(self):
        # Create global parallelism locks to avoid racing issues with parallel
        # bars works only if fork available (Linux/MacOSX, but not Windows)
        cls = type(self)
        root_lock = cls.th_lock
        if root_lock is not None:
            root_lock.acquire()
        # Lazily create the class-level multiprocessing lock while holding the
        # thread lock, so concurrent first instantiations don't race.
        cls.create_mp_lock()
        self.locks = [lk for lk in [cls.mp_lock, cls.th_lock] if lk is not None]
        if root_lock is not None:
            root_lock.release()

    def acquire(self, *a, **k):
        # Always acquire in a fixed order (mp lock first) to avoid deadlock.
        for lock in self.locks:
            lock.acquire(*a, **k)

    def release(self):
        for lock in self.locks[::-1]:  # Release in inverse order of acquisition
            lock.release()

    def __enter__(self):
        self.acquire()

    def __exit__(self, *exc):
        self.release()

    @classmethod
    def create_mp_lock(cls):
        # `mp_lock` is shared by all instances (class attribute), created on
        # first use; `None` when multiprocessing is unavailable.
        if not hasattr(cls, 'mp_lock'):
            try:
                from multiprocessing import RLock
                cls.mp_lock = RLock()
            except (ImportError, OSError):  # pragma: no cover
                cls.mp_lock = None

    @classmethod
    def create_th_lock(cls):
        # Deprecated: the thread lock is now created eagerly at class level.
        assert hasattr(cls, 'th_lock')
        warn("create_th_lock not needed anymore", TqdmDeprecationWarning, stacklevel=2)
+
+
class Bar(object):
    """
    `str.format`-able bar with format specifiers: `[width][type]`

    - `width`
      + unspecified (default): use `self.default_len`
      + `int >= 0`: overrides `self.default_len`
      + `int < 0`: subtract from `self.default_len`
    - `type`
      + `a`: ascii (`charset=self.ASCII` override)
      + `u`: unicode (`charset=self.UTF` override)
      + `b`: blank (`charset=" "` override)
    """
    ASCII = " 123456789#"
    UTF = u" " + u''.join(map(chr, range(0x258F, 0x2587, -1)))
    # NOTE(review): upstream tqdm defines BLANK as TWO spaces; a single space
    # makes `nsyms == 0` in `__format__` and divides by zero for the 'b'
    # charset -- verify this literal wasn't whitespace-collapsed in transit.
    BLANK = " "
    COLOUR_RESET = '\x1b[0m'
    COLOUR_RGB = '\x1b[38;2;%d;%d;%dm'
    COLOURS = {'BLACK': '\x1b[30m', 'RED': '\x1b[31m', 'GREEN': '\x1b[32m',
               'YELLOW': '\x1b[33m', 'BLUE': '\x1b[34m', 'MAGENTA': '\x1b[35m',
               'CYAN': '\x1b[36m', 'WHITE': '\x1b[37m'}

    def __init__(self, frac, default_len=10, charset=UTF, colour=None):
        # `frac` is the filled fraction of the bar; out-of-range values are
        # clamped with a warning rather than rejected.
        if not 0 <= frac <= 1:
            warn("clamping frac to range [0, 1]", TqdmWarning, stacklevel=2)
            frac = max(0, min(1, frac))
        assert default_len > 0
        self.frac = frac
        self.default_len = default_len
        self.charset = charset
        self.colour = colour  # validated by the property setter below

    @property
    def colour(self):
        return self._colour

    @colour.setter
    def colour(self, value):
        # Accept a named colour (case-insensitive) or '#rrggbb' hex; anything
        # else falls back to no colour, with a warning.
        if not value:
            self._colour = None
            return
        try:
            if value.upper() in self.COLOURS:
                self._colour = self.COLOURS[value.upper()]
            elif value[0] == '#' and len(value) == 7:
                self._colour = self.COLOUR_RGB % tuple(
                    int(i, 16) for i in (value[1:3], value[3:5], value[5:7]))
            else:
                raise KeyError
        except (KeyError, AttributeError):
            warn("Unknown colour (%s); valid choices: [hex (#00ff00), %s]" % (
                value, ", ".join(self.COLOURS)),
                TqdmWarning, stacklevel=2)
            self._colour = None

    def __format__(self, format_spec):
        # Parse `[width][type]` from the spec (e.g. "{:10a}" -> 10-wide ascii).
        if format_spec:
            _type = format_spec[-1].lower()
            try:
                charset = {'a': self.ASCII, 'u': self.UTF, 'b': self.BLANK}[_type]
            except KeyError:
                charset = self.charset
            else:
                format_spec = format_spec[:-1]
            if format_spec:
                N_BARS = int(format_spec)
                if N_BARS < 0:
                    N_BARS += self.default_len
            else:
                N_BARS = self.default_len
        else:
            charset = self.charset
            N_BARS = self.default_len

        # `bar_length` full symbols plus one partial symbol indexed by
        # `frac_bar_length` into the charset.
        nsyms = len(charset) - 1
        bar_length, frac_bar_length = divmod(int(self.frac * N_BARS * nsyms), nsyms)

        res = charset[-1] * bar_length
        if bar_length < N_BARS:  # whitespace padding
            res = res + charset[frac_bar_length] + charset[0] * (N_BARS - bar_length - 1)
        return self.colour + res + self.COLOUR_RESET if self.colour else res
+
+
class EMA(object):
    """
    Exponential moving average: smoothing to give progressively lower
    weights to older values.

    Parameters
    ----------
    smoothing  : float, optional
        Smoothing factor in range [0, 1], [default: 0.3].
        Increase to give more weight to recent values.
        Ranges from 0 (yields old value) to 1 (yields new value).
    """
    def __init__(self, smoothing=0.3):
        self.alpha = smoothing
        self.last = 0
        self.calls = 0

    def __call__(self, x=None):
        """
        Update the average with `x` (if given) and return the
        bias-corrected current value.

        Parameters
        ----------
        x  : float
            New value to include in EMA.
        """
        beta = 1 - self.alpha
        if x is not None:
            self.calls += 1
            self.last = self.alpha * x + beta * self.last
        if not self.calls:
            return self.last
        # debias: early estimates are scaled up to offset the zero seed
        return self.last / (1 - beta ** self.calls)
+
+
+class tqdm(Comparable):
+ """
+ Decorate an iterable object, returning an iterator which acts exactly
+ like the original iterable, but prints a dynamically updating
+ progressbar every time a value is requested.
+
+ Parameters
+ ----------
+ iterable : iterable, optional
+ Iterable to decorate with a progressbar.
+ Leave blank to manually manage the updates.
+ desc : str, optional
+ Prefix for the progressbar.
+ total : int or float, optional
+ The number of expected iterations. If unspecified,
+ len(iterable) is used if possible. If float("inf") or as a last
+ resort, only basic progress statistics are displayed
+ (no ETA, no progressbar).
+ If `gui` is True and this parameter needs subsequent updating,
+ specify an initial arbitrary large positive number,
+ e.g. 9e9.
+ leave : bool, optional
+ If [default: True], keeps all traces of the progressbar
+ upon termination of iteration.
+ If `None`, will leave only if `position` is `0`.
+ file : `io.TextIOWrapper` or `io.StringIO`, optional
+ Specifies where to output the progress messages
+ (default: sys.stderr). Uses `file.write(str)` and `file.flush()`
+ methods. For encoding, see `write_bytes`.
+ ncols : int, optional
+ The width of the entire output message. If specified,
+ dynamically resizes the progressbar to stay within this bound.
+ If unspecified, attempts to use environment width. The
+ fallback is a meter width of 10 and no limit for the counter and
+ statistics. If 0, will not print any meter (only stats).
+ mininterval : float, optional
+ Minimum progress display update interval [default: 0.1] seconds.
+ maxinterval : float, optional
+ Maximum progress display update interval [default: 10] seconds.
+ Automatically adjusts `miniters` to correspond to `mininterval`
+ after long display update lag. Only works if `dynamic_miniters`
+ or monitor thread is enabled.
+ miniters : int or float, optional
+ Minimum progress display update interval, in iterations.
+ If 0 and `dynamic_miniters`, will automatically adjust to equal
+ `mininterval` (more CPU efficient, good for tight loops).
+ If > 0, will skip display of specified number of iterations.
+ Tweak this and `mininterval` to get very efficient loops.
+ If your progress is erratic with both fast and slow iterations
+ (network, skipping items, etc) you should set miniters=1.
+ ascii : bool or str, optional
+ If unspecified or False, use unicode (smooth blocks) to fill
+ the meter. The fallback is to use ASCII characters " 123456789#".
+ disable : bool, optional
+ Whether to disable the entire progressbar wrapper
+ [default: False]. If set to None, disable on non-TTY.
+ unit : str, optional
+ String that will be used to define the unit of each iteration
+ [default: it].
+ unit_scale : bool or int or float, optional
+ If 1 or True, the number of iterations will be reduced/scaled
+ automatically and a metric prefix following the
+ International System of Units standard will be added
+ (kilo, mega, etc.) [default: False]. If any other non-zero
+ number, will scale `total` and `n`.
+ dynamic_ncols : bool, optional
+ If set, constantly alters `ncols` and `nrows` to the
+ environment (allowing for window resizes) [default: False].
+ smoothing : float, optional
+ Exponential moving average smoothing factor for speed estimates
+ (ignored in GUI mode). Ranges from 0 (average speed) to 1
+ (current/instantaneous speed) [default: 0.3].
+ bar_format : str, optional
+ Specify a custom bar string formatting. May impact performance.
+ [default: '{l_bar}{bar}{r_bar}'], where
+ l_bar='{desc}: {percentage:3.0f}%|' and
+ r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, '
+ '{rate_fmt}{postfix}]'
+ Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt,
+ percentage, elapsed, elapsed_s, ncols, nrows, desc, unit,
+ rate, rate_fmt, rate_noinv, rate_noinv_fmt,
+ rate_inv, rate_inv_fmt, postfix, unit_divisor,
+ remaining, remaining_s, eta.
+ Note that a trailing ": " is automatically removed after {desc}
+ if the latter is empty.
+ initial : int or float, optional
+ The initial counter value. Useful when restarting a progress
+ bar [default: 0]. If using float, consider specifying `{n:.3f}`
+ or similar in `bar_format`, or specifying `unit_scale`.
+ position : int, optional
+ Specify the line offset to print this bar (starting from 0)
+ Automatic if unspecified.
+ Useful to manage multiple bars at once (eg, from threads).
+ postfix : dict or *, optional
+ Specify additional stats to display at the end of the bar.
+ Calls `set_postfix(**postfix)` if possible (dict).
+ unit_divisor : float, optional
+ [default: 1000], ignored unless `unit_scale` is True.
+ write_bytes : bool, optional
+ Whether to write bytes. If (default: False) will write unicode.
+ lock_args : tuple, optional
+ Passed to `refresh` for intermediate output
+ (initialisation, iterating, and updating).
+ nrows : int, optional
+ The screen height. If specified, hides nested bars outside this
+ bound. If unspecified, attempts to use environment height.
+ The fallback is 20.
+ colour : str, optional
+ Bar colour (e.g. 'green', '#00ff00').
+ delay : float, optional
+ Don't display until [default: 0] seconds have elapsed.
+ gui : bool, optional
+ WARNING: internal parameter - do not use.
+ Use tqdm.gui.tqdm(...) instead. If set, will attempt to use
+ matplotlib animations for a graphical output [default: False].
+
+ Returns
+ -------
+ out : decorated iterator.
+ """
+
+ monitor_interval = 10 # set to 0 to disable the thread
+ monitor = None
+ _instances = WeakSet()
+
+ @staticmethod
+ def format_sizeof(num, suffix='', divisor=1000):
+ """
+ Formats a number (greater than unity) with SI Order of Magnitude
+ prefixes.
+
+ Parameters
+ ----------
+ num : float
+ Number ( >= 1) to format.
+ suffix : str, optional
+ Post-postfix [default: ''].
+ divisor : float, optional
+ Divisor between prefixes [default: 1000].
+
+ Returns
+ -------
+ out : str
+ Number with Order of Magnitude SI unit postfix.
+ """
+ for unit in ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z']:
+ if abs(num) < 999.5:
+ if abs(num) < 99.95:
+ if abs(num) < 9.995:
+ return f'{num:1.2f}{unit}{suffix}'
+ return f'{num:2.1f}{unit}{suffix}'
+ return f'{num:3.0f}{unit}{suffix}'
+ num /= divisor
+ return f'{num:3.1f}Y{suffix}'
+
+ @staticmethod
+ def format_interval(t):
+ """
+ Formats a number of seconds as a clock time, [H:]MM:SS
+
+ Parameters
+ ----------
+ t : int
+ Number of seconds.
+
+ Returns
+ -------
+ out : str
+ [H:]MM:SS
+ """
+ mins, s = divmod(int(t), 60)
+ h, m = divmod(mins, 60)
+ return f'{h:d}:{m:02d}:{s:02d}' if h else f'{m:02d}:{s:02d}'
+
+ @staticmethod
+ def format_num(n):
+ """
+ Intelligent scientific notation (.3g).
+
+ Parameters
+ ----------
+ n : int or float or Numeric
+ A Number.
+
+ Returns
+ -------
+ out : str
+ Formatted number.
+ """
+ f = f'{n:.3g}'.replace('e+0', 'e+').replace('e-0', 'e-')
+ n = str(n)
+ return f if len(f) < len(n) else n
+
+ @staticmethod
+ def status_printer(file):
+ """
+ Manage the printing and in-place updating of a line of characters.
+ Note that if the string is longer than a line, then in-place
+ updating may not work (it will print a new line at each refresh).
+ """
+ fp = file
+ fp_flush = getattr(fp, 'flush', lambda: None) # pragma: no cover
+ if fp in (sys.stderr, sys.stdout):
+ getattr(sys.stderr, 'flush', lambda: None)()
+ getattr(sys.stdout, 'flush', lambda: None)()
+
+ def fp_write(s):
+ fp.write(str(s))
+ fp_flush()
+
+ last_len = [0]
+
+ def print_status(s):
+ len_s = disp_len(s)
+ fp_write('\r' + s + (' ' * max(last_len[0] - len_s, 0)))
+ last_len[0] = len_s
+
+ return print_status
+
    @staticmethod
    def format_meter(n, total, elapsed, ncols=None, prefix='', ascii=False, unit='it',
                     unit_scale=False, rate=None, bar_format=None, postfix=None,
                     unit_divisor=1000, initial=0, colour=None, **extra_kwargs):
        """
        Return a string-based progress bar given some parameters

        Parameters
        ----------
        n  : int or float
            Number of finished iterations.
        total  : int or float
            The expected total number of iterations. If meaningless (None),
            only basic progress statistics are displayed (no ETA).
        elapsed  : float
            Number of seconds passed since start.
        ncols  : int, optional
            The width of the entire output message. If specified,
            dynamically resizes `{bar}` to stay within this bound
            [default: None]. If `0`, will not print any bar (only stats).
            The fallback is `{bar:10}`.
        prefix  : str, optional
            Prefix message (included in total width) [default: ''].
            Use as {desc} in bar_format string.
        ascii  : bool, optional or str, optional
            If not set, use unicode (smooth blocks) to fill the meter
            [default: False]. The fallback is to use ASCII characters
            " 123456789#".
        unit  : str, optional
            The iteration unit [default: 'it'].
        unit_scale  : bool or int or float, optional
            If 1 or True, the number of iterations will be printed with an
            appropriate SI metric prefix (k = 10^3, M = 10^6, etc.)
            [default: False]. If any other non-zero number, will scale
            `total` and `n`.
        rate  : float, optional
            Manual override for iteration rate.
            If [default: None], uses n/elapsed.
        bar_format  : str, optional
            Specify a custom bar string formatting. May impact performance.
            [default: '{l_bar}{bar}{r_bar}'], where
            l_bar='{desc}: {percentage:3.0f}%|' and
            r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, '
              '{rate_fmt}{postfix}]'
            Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt,
              percentage, elapsed, elapsed_s, ncols, nrows, desc, unit,
              rate, rate_fmt, rate_noinv, rate_noinv_fmt,
              rate_inv, rate_inv_fmt, postfix, unit_divisor,
              remaining, remaining_s, eta.
            Note that a trailing ": " is automatically removed after {desc}
            if the latter is empty.
        postfix  : *, optional
            Similar to `prefix`, but placed at the end
            (e.g. for additional stats).
            Note: postfix is usually a string (not a dict) for this method,
            and will if possible be set to postfix = ', ' + postfix.
            However other types are supported (#382).
        unit_divisor  : float, optional
            [default: 1000], ignored unless `unit_scale` is True.
        initial  : int or float, optional
            The initial counter value [default: 0].
        colour  : str, optional
            Bar colour (e.g. 'green', '#00ff00').

        Returns
        -------
        out  : Formatted meter and stats, ready to display.
        """

        # sanity check: total
        if total and n >= (total + 0.5):  # allow float imprecision (#849)
            total = None

        # apply custom scale if necessary
        if unit_scale and unit_scale not in (True, 1):
            if total:
                total *= unit_scale
            n *= unit_scale
            if rate:
                rate *= unit_scale  # by default rate = self.avg_dn / self.avg_dt
            unit_scale = False

        elapsed_str = tqdm.format_interval(elapsed)

        # if unspecified, attempt to use rate = average speed
        # (we allow manual override since predicting time is an arcane art)
        if rate is None and elapsed:
            rate = (n - initial) / elapsed
        inv_rate = 1 / rate if rate else None
        format_sizeof = tqdm.format_sizeof
        rate_noinv_fmt = ((format_sizeof(rate) if unit_scale else f'{rate:5.2f}')
                          if rate else '?') + unit + '/s'
        rate_inv_fmt = (
            (format_sizeof(inv_rate) if unit_scale else f'{inv_rate:5.2f}')
            if inv_rate else '?') + 's/' + unit
        # show "s/it" instead of "it/s" when slower than one iteration/second
        rate_fmt = rate_inv_fmt if inv_rate and inv_rate > 1 else rate_noinv_fmt

        if unit_scale:
            n_fmt = format_sizeof(n, divisor=unit_divisor)
            total_fmt = format_sizeof(total, divisor=unit_divisor) if total is not None else '?'
        else:
            n_fmt = str(n)
            total_fmt = str(total) if total is not None else '?'

        # postfix may be a non-str object (#382): on TypeError leave it as-is
        try:
            postfix = ', ' + postfix if postfix else ''
        except TypeError:
            pass

        remaining = (total - n) / rate if rate and total else 0
        remaining_str = tqdm.format_interval(remaining) if rate else '?'
        try:
            eta_dt = (datetime.now() + timedelta(seconds=remaining)
                      if rate and total else datetime.fromtimestamp(0, timezone.utc))
        except OverflowError:
            eta_dt = datetime.max

        # format the stats displayed to the left and right sides of the bar
        if prefix:
            # old prefix setup work around
            bool_prefix_colon_already = (prefix[-2:] == ": ")
            l_bar = prefix if bool_prefix_colon_already else prefix + ": "
        else:
            l_bar = ''

        r_bar = f'| {n_fmt}/{total_fmt} [{elapsed_str}<{remaining_str}, {rate_fmt}{postfix}]'

        # Custom bar formatting
        # Populate a dict with all available progress indicators
        format_dict = {
            # slight extension of self.format_dict
            'n': n, 'n_fmt': n_fmt, 'total': total, 'total_fmt': total_fmt,
            'elapsed': elapsed_str, 'elapsed_s': elapsed,
            'ncols': ncols, 'desc': prefix or '', 'unit': unit,
            'rate': inv_rate if inv_rate and inv_rate > 1 else rate,
            'rate_fmt': rate_fmt, 'rate_noinv': rate,
            'rate_noinv_fmt': rate_noinv_fmt, 'rate_inv': inv_rate,
            'rate_inv_fmt': rate_inv_fmt,
            'postfix': postfix, 'unit_divisor': unit_divisor,
            'colour': colour,
            # plus more useful definitions
            'remaining': remaining_str, 'remaining_s': remaining,
            'l_bar': l_bar, 'r_bar': r_bar, 'eta': eta_dt,
            **extra_kwargs}

        # total is known: we can predict some stats
        if total:
            # fractional and percentage progress
            frac = n / total
            percentage = frac * 100

            l_bar += f'{percentage:3.0f}%|'

            if ncols == 0:
                # no room for a bar: join left and right stats directly
                return l_bar[:-1] + r_bar[1:]

            format_dict.update(l_bar=l_bar)
            if bar_format:
                format_dict.update(percentage=percentage)

                # auto-remove colon for empty `{desc}`
                if not prefix:
                    bar_format = bar_format.replace("{desc}: ", '')
            else:
                bar_format = "{l_bar}{bar}{r_bar}"

            # First format with `{bar}` replaced by a probe, to measure how
            # much width remains for the actual bar.
            full_bar = FormatReplace()
            nobar = bar_format.format(bar=full_bar, **format_dict)
            if not full_bar.format_called:
                return nobar  # no `{bar}`; nothing else to do

            # Formatting progress bar space available for bar's display
            full_bar = Bar(frac,
                           max(1, ncols - disp_len(nobar)) if ncols else 10,
                           charset=Bar.ASCII if ascii is True else ascii or Bar.UTF,
                           colour=colour)
            if not _is_ascii(full_bar.charset) and _is_ascii(bar_format):
                bar_format = str(bar_format)
            res = bar_format.format(bar=full_bar, **format_dict)
            return disp_trim(res, ncols) if ncols else res

        elif bar_format:
            # user-specified bar_format but no total
            l_bar += '|'
            format_dict.update(l_bar=l_bar, percentage=0)
            full_bar = FormatReplace()
            nobar = bar_format.format(bar=full_bar, **format_dict)
            if not full_bar.format_called:
                return nobar
            # unknown total: render a blank bar of the remaining width
            full_bar = Bar(0,
                           max(1, ncols - disp_len(nobar)) if ncols else 10,
                           charset=Bar.BLANK, colour=colour)
            res = bar_format.format(bar=full_bar, **format_dict)
            return disp_trim(res, ncols) if ncols else res
        else:
            # no total: no progressbar, ETA, just progress stats
            return (f'{(prefix + ": ") if prefix else ""}'
                    f'{n_fmt}{unit} [{elapsed_str}, {rate_fmt}{postfix}]')
+
    def __new__(cls, *_, **__):
        # Register every new bar in the class-level WeakSet and lazily
        # (re)start the monitor thread used for `maxinterval` handling.
        instance = object.__new__(cls)
        with cls.get_lock():  # also constructs lock if non-existent
            cls._instances.add(instance)
            # create monitoring thread
            if cls.monitor_interval and (cls.monitor is None
                                         or not cls.monitor.report()):
                try:
                    cls.monitor = TMonitor(cls, cls.monitor_interval)
                except Exception as e:  # pragma: nocover
                    warn("tqdm:disabling monitor support"
                         " (monitor_interval = 0) due to:\n" + str(e),
                         TqdmMonitorWarning, stacklevel=2)
                    # permanently disable monitoring for this class
                    cls.monitor_interval = 0
        return instance
+
+ @classmethod
+ def _get_free_pos(cls, instance=None):
+ """Skips specified instance."""
+ positions = {abs(inst.pos) for inst in cls._instances
+ if inst is not instance and hasattr(inst, "pos")}
+ return min(set(range(len(positions) + 1)).difference(positions))
+
    @classmethod
    def _decr_instances(cls, instance):
        """
        Remove from list and reposition another unfixed bar
        to fill the new gap.

        This means that by default (where all nested bars are unfixed),
        order is not maintained but screen flicker/blank space is minimised.
        (tqdm<=4.44.1 moved ALL subsequent unfixed bars up.)
        """
        with cls._lock:
            try:
                cls._instances.remove(instance)
            except KeyError:
                # if not instance.gui: # pragma: no cover
                # raise
                pass  # py2: maybe magically removed already
            # else:
            if not instance.gui:
                # bars at/below the last visible row (per `nrows`, default 20)
                # are candidates to take over the freed position
                last = (instance.nrows or 20) - 1
                # find unfixed (`pos >= 0`) overflow (`pos >= nrows - 1`)
                instances = list(filter(
                    lambda i: hasattr(i, "pos") and last <= i.pos,
                    cls._instances))
                # set first found to current `pos`
                if instances:
                    inst = min(instances, key=lambda i: i.pos)
                    inst.clear(nolock=True)
                    inst.pos = abs(instance.pos)
+
+ @classmethod
+ def write(cls, s, file=None, end="\n", nolock=False):
+ """Print a message via tqdm (without overlap with bars)."""
+ fp = file if file is not None else sys.stdout
+ with cls.external_write_mode(file=file, nolock=nolock):
+ # Write the message
+ fp.write(s)
+ fp.write(end)
+
    @classmethod
    @contextmanager
    def external_write_mode(cls, file=None, nolock=False):
        """
        Disable tqdm within context and refresh tqdm when exits.
        Useful when writing to standard output stream
        """
        fp = file if file is not None else sys.stdout

        try:
            if not nolock:
                cls.get_lock().acquire()
            # Clear all bars
            inst_cleared = []
            for inst in getattr(cls, '_instances', []):
                # Clear instance if in the target output file
                # or if write output + tqdm output are both either
                # sys.stdout or sys.stderr (because both are mixed in terminal)
                if hasattr(inst, "start_t") and (inst.fp == fp or all(
                        f in (sys.stdout, sys.stderr) for f in (fp, inst.fp))):
                    inst.clear(nolock=True)
                    inst_cleared.append(inst)
            # hand control to the caller's writes while bars are cleared
            yield
            # Force refresh display of bars we cleared
            for inst in inst_cleared:
                inst.refresh(nolock=True)
        finally:
            if not nolock:
                cls._lock.release()
+
+ @classmethod
+ def set_lock(cls, lock):
+ """Set the global lock."""
+ cls._lock = lock
+
+ @classmethod
+ def get_lock(cls):
+ """Get the global lock. Construct it if it does not exist."""
+ if not hasattr(cls, '_lock'):
+ cls._lock = TqdmDefaultWriteLock()
+ return cls._lock
+
    @classmethod
    def pandas(cls, **tqdm_kwargs):
        """
        Registers the current `tqdm` class with
        pandas.core.
        ( frame.DataFrame
        | series.Series
        | groupby.(generic.)DataFrameGroupBy
        | groupby.(generic.)SeriesGroupBy
        ).progress_apply

        A new instance will be created every time `progress_apply` is called,
        and each instance will automatically `close()` upon completion.

        Parameters
        ----------
        tqdm_kwargs  : arguments for the tqdm instance

        Examples
        --------
        >>> import pandas as pd
        >>> import numpy as np
        >>> from tqdm import tqdm
        >>> from tqdm.gui import tqdm as tqdm_gui
        >>>
        >>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
        >>> tqdm.pandas(ncols=50)  # can use tqdm_gui, optional kwargs, etc
        >>> # Now you can use `progress_apply` instead of `apply`
        >>> df.groupby(0).progress_apply(lambda x: x**2)

        References
        ----------
        <https://stackoverflow.com/questions/18603270/\
        progress-indicator-during-pandas-operations-python>
        """
        from warnings import catch_warnings, simplefilter

        from pandas.core.frame import DataFrame
        from pandas.core.series import Series
        # The try/except cascades below shim over pandas' private API moving
        # across versions (0.18 .. >=1.3); each fallback is tagged with the
        # version range it serves.
        try:
            with catch_warnings():
                simplefilter("ignore", category=FutureWarning)
                from pandas import Panel
        except ImportError:  # pandas>=1.2.0
            Panel = None
        Rolling, Expanding = None, None
        try:  # pandas>=1.0.0
            from pandas.core.window.rolling import _Rolling_and_Expanding
        except ImportError:
            try:  # pandas>=0.18.0
                from pandas.core.window import _Rolling_and_Expanding
            except ImportError:  # pandas>=1.2.0
                try:  # pandas>=1.2.0
                    from pandas.core.window.expanding import Expanding
                    from pandas.core.window.rolling import Rolling
                    _Rolling_and_Expanding = Rolling, Expanding
                except ImportError:  # pragma: no cover
                    _Rolling_and_Expanding = None
        try:  # pandas>=0.25.0
            from pandas.core.groupby.generic import SeriesGroupBy  # , NDFrameGroupBy
            from pandas.core.groupby.generic import DataFrameGroupBy
        except ImportError:  # pragma: no cover
            try:  # pandas>=0.23.0
                from pandas.core.groupby.groupby import DataFrameGroupBy, SeriesGroupBy
            except ImportError:
                from pandas.core.groupby import DataFrameGroupBy, SeriesGroupBy
        try:  # pandas>=0.23.0
            from pandas.core.groupby.groupby import GroupBy
        except ImportError:  # pragma: no cover
            from pandas.core.groupby import GroupBy

        try:  # pandas>=0.23.0
            from pandas.core.groupby.groupby import PanelGroupBy
        except ImportError:
            try:
                from pandas.core.groupby import PanelGroupBy
            except ImportError:  # pandas>=0.25.0
                PanelGroupBy = None

        tqdm_kwargs = tqdm_kwargs.copy()
        # single-element list so `inner` can consume it (mutable closure cell)
        deprecated_t = [tqdm_kwargs.pop('deprecated_t', None)]

        def inner_generator(df_function='apply'):
            def inner(df, func, *args, **kwargs):
                """
                Parameters
                ----------
                df  : (DataFrame|Series)[GroupBy]
                    Data (may be grouped).
                func  : function
                    To be applied on the (grouped) data.
                **kwargs  : optional
                    Transmitted to `df.apply()`.
                """

                # Precompute total iterations
                total = tqdm_kwargs.pop("total", getattr(df, 'ngroups', None))
                if total is None:  # not grouped
                    if df_function == 'applymap':
                        total = df.size
                    elif isinstance(df, Series):
                        total = len(df)
                    elif (_Rolling_and_Expanding is None or
                          not isinstance(df, _Rolling_and_Expanding)):
                        # DataFrame or Panel
                        axis = kwargs.get('axis', 0)
                        if axis == 'index':
                            axis = 0
                        elif axis == 'columns':
                            axis = 1
                        # when axis=0, total is shape[axis1]
                        total = df.size // df.shape[axis]

                # Init bar
                if deprecated_t[0] is not None:
                    t = deprecated_t[0]
                    deprecated_t[0] = None
                else:
                    t = cls(total=total, **tqdm_kwargs)

                if len(args) > 0:
                    # *args intentionally not supported (see #244, #299)
                    TqdmDeprecationWarning(
                        "Except func, normal arguments are intentionally" +
                        " not supported by" +
                        " `(DataFrame|Series|GroupBy).progress_apply`." +
                        " Use keyword arguments instead.",
                        fp_write=getattr(t.fp, 'write', sys.stderr.write))

                try:  # pandas>=1.3.0
                    from pandas.core.common import is_builtin_func
                except ImportError:
                    is_builtin_func = df._is_builtin_func
                try:
                    func = is_builtin_func(func)
                except TypeError:
                    pass

                # Define bar updating wrapper
                def wrapper(*args, **kwargs):
                    # update tbar correctly
                    # it seems `pandas apply` calls `func` twice
                    # on the first column/row to decide whether it can
                    # take a fast or slow code path; so stop when t.total==t.n
                    t.update(n=1 if not t.total or t.n < t.total else 0)
                    return func(*args, **kwargs)

                # Apply the provided function (in **kwargs)
                # on the df using our wrapper (which provides bar updating)
                try:
                    return getattr(df, df_function)(wrapper, **kwargs)
                finally:
                    t.close()

            return inner

        # Monkeypatch pandas to provide easy methods
        # Enable custom tqdm progress in pandas!
        Series.progress_apply = inner_generator()
        SeriesGroupBy.progress_apply = inner_generator()
        Series.progress_map = inner_generator('map')
        SeriesGroupBy.progress_map = inner_generator('map')

        DataFrame.progress_apply = inner_generator()
        DataFrameGroupBy.progress_apply = inner_generator()
        DataFrame.progress_applymap = inner_generator('applymap')
        DataFrame.progress_map = inner_generator('map')
        DataFrameGroupBy.progress_map = inner_generator('map')

        if Panel is not None:
            Panel.progress_apply = inner_generator()
        if PanelGroupBy is not None:
            PanelGroupBy.progress_apply = inner_generator()

        GroupBy.progress_apply = inner_generator()
        GroupBy.progress_aggregate = inner_generator('aggregate')
        GroupBy.progress_transform = inner_generator('transform')

        if Rolling is not None and Expanding is not None:
            Rolling.progress_apply = inner_generator()
            Expanding.progress_apply = inner_generator()
        elif _Rolling_and_Expanding is not None:
            _Rolling_and_Expanding.progress_apply = inner_generator()
+
    # Override constructor defaults via TQDM_* environment variables
    # (e.g. TQDM_MININTERVAL=0.5); `types` supplies coercions that cannot
    # be inferred from the signature defaults.
    @envwrap("TQDM_", is_method=True, types={'total': float, 'ncols': int, 'miniters': float,
                                             'position': int, 'nrows': int})
    def __init__(self, iterable=None, desc=None, total=None, leave=True, file=None,
                 ncols=None, mininterval=0.1, maxinterval=10.0, miniters=None,
                 ascii=None, disable=False, unit='it', unit_scale=False,
                 dynamic_ncols=False, smoothing=0.3, bar_format=None, initial=0,
                 position=None, postfix=None, unit_divisor=1000, write_bytes=False,
                 lock_args=None, nrows=None, colour=None, delay=0.0, gui=False,
                 **kwargs):
        """see tqdm.tqdm for arguments"""
        if file is None:
            file = sys.stderr

        if write_bytes:
            # Despite coercing unicode into bytes, py2 sys.std* streams
            # should have bytes written to them.
            file = SimpleTextIOWrapper(
                file, encoding=getattr(file, 'encoding', None) or 'utf-8')

        # Wrap the stream so a failing write() disables this bar instead of
        # crashing the wrapped iteration.
        file = DisableOnWriteError(file, tqdm_instance=self)

        # `disable=None` means "disable when not attached to a terminal"
        if disable is None and hasattr(file, "isatty") and not file.isatty():
            disable = True

        # Infer `total` from the iterable when possible
        if total is None and iterable is not None:
            try:
                total = len(iterable)
            except (TypeError, AttributeError):
                total = None
        if total == float("inf"):
            # Infinite iterations, behave same as unknown
            total = None

        if disable:
            # Keep only the minimal state needed by __len__/__bool__ and
            # format_dict; release our slot in the position registry.
            self.iterable = iterable
            self.disable = disable
            with self._lock:
                self.pos = self._get_free_pos(self)
                self._instances.remove(self)
            self.n = initial
            self.total = total
            self.leave = leave
            return

        if kwargs:
            # Unknown keyword argument(s): clean up our registry slot
            # before raising so sibling bars are not shifted.
            self.disable = True
            with self._lock:
                self.pos = self._get_free_pos(self)
                self._instances.remove(self)
            raise (
                TqdmDeprecationWarning(
                    "`nested` is deprecated and automated.\n"
                    "Use `position` instead for manual control.\n",
                    fp_write=getattr(file, 'write', sys.stderr.write))
                if "nested" in kwargs else
                TqdmKeyError("Unknown argument(s): " + str(kwargs)))

        # Preprocess the arguments: determine screen geometry when needed
        if (
            (ncols is None or nrows is None) and (file in (sys.stderr, sys.stdout))
        ) or dynamic_ncols:  # pragma: no cover
            if dynamic_ncols:
                # keep the callable so geometry is re-queried on each render
                dynamic_ncols = _screen_shape_wrapper()
                if dynamic_ncols:
                    ncols, nrows = dynamic_ncols(file)
            else:
                # one-shot measurement; only fills values left unspecified
                _dynamic_ncols = _screen_shape_wrapper()
                if _dynamic_ncols:
                    _ncols, _nrows = _dynamic_ncols(file)
                    if ncols is None:
                        ncols = _ncols
                    if nrows is None:
                        nrows = _nrows

        if miniters is None:
            miniters = 0
            dynamic_miniters = True
        else:
            dynamic_miniters = False

        if mininterval is None:
            mininterval = 0

        if maxinterval is None:
            maxinterval = 0

        if ascii is None:
            ascii = not _supports_unicode(file)

        if bar_format and ascii is not True and not _is_ascii(ascii):
            # Convert bar format into unicode since terminal uses unicode
            bar_format = str(bar_format)

        if smoothing is None:
            smoothing = 0

        # Store the arguments
        self.iterable = iterable
        self.desc = desc or ''
        self.total = total
        self.leave = leave
        self.fp = file
        self.ncols = ncols
        self.nrows = nrows
        self.mininterval = mininterval
        self.maxinterval = maxinterval
        self.miniters = miniters
        self.dynamic_miniters = dynamic_miniters
        self.ascii = ascii
        self.disable = disable
        self.unit = unit
        self.unit_scale = unit_scale
        self.unit_divisor = unit_divisor
        self.initial = initial
        self.lock_args = lock_args
        self.delay = delay
        self.gui = gui
        self.dynamic_ncols = dynamic_ncols
        self.smoothing = smoothing
        # Exponential moving averages: iteration delta, time delta (for the
        # displayed rate) and the auto-adjusted `miniters`
        self._ema_dn = EMA(smoothing)
        self._ema_dt = EMA(smoothing)
        self._ema_miniters = EMA(smoothing)
        self.bar_format = bar_format
        self.postfix = None
        self.colour = colour
        self._time = time
        if postfix:
            try:
                self.set_postfix(refresh=False, **postfix)
            except TypeError:
                # `postfix` is not dict-like; store it verbatim
                self.postfix = postfix

        # Init the iterations counters
        self.last_print_n = initial
        self.n = initial

        # if nested, at initial sp() call we replace '\r' by '\n' to
        # not overwrite the outer progress bar
        with self._lock:
            # mark fixed positions as negative
            self.pos = self._get_free_pos(self) if position is None else -position

        if not gui:
            # Initialize the screen printer
            self.sp = self.status_printer(self.fp)
            if delay <= 0:
                self.refresh(lock_args=self.lock_args)

        # Init the time counter
        self.last_print_t = self._time()
        # NB: Avoid race conditions by setting start_t at the very end of init
        self.start_t = self.last_print_t
+
+ def __bool__(self):
+ if self.total is not None:
+ return self.total > 0
+ if self.iterable is None:
+ raise TypeError('bool() undefined when iterable == total == None')
+ return bool(self.iterable)
+
+ def __len__(self):
+ return (
+ self.total if self.iterable is None
+ else self.iterable.shape[0] if hasattr(self.iterable, "shape")
+ else len(self.iterable) if hasattr(self.iterable, "__len__")
+ else self.iterable.__length_hint__() if hasattr(self.iterable, "__length_hint__")
+ else getattr(self, "total", None))
+
    def __reversed__(self):
        """Iterate in reverse by temporarily swapping in `reversed(iterable)`."""
        try:
            orig = self.iterable
        except AttributeError:
            raise TypeError("'tqdm' object is not reversible")
        else:
            self.iterable = reversed(self.iterable)
            return self.__iter__()
        finally:
            # NOTE(review): __iter__ is a generator function, so it only
            # reads self.iterable when first iterated -- presumably after
            # this `finally` restores the original; confirm before changing.
            self.iterable = orig
+
+ def __contains__(self, item):
+ contains = getattr(self.iterable, '__contains__', None)
+ return contains(item) if contains is not None else item in self.__iter__()
+
    def __enter__(self):
        """Context manager entry: return the bar itself."""
        return self
+
    def __exit__(self, exc_type, exc_value, traceback):
        """Context manager exit: close the bar, tolerating late teardown."""
        try:
            self.close()
        except AttributeError:
            # maybe eager thread cleanup upon external error
            if (exc_type, exc_value, traceback) == (None, None, None):
                raise
            warn("AttributeError ignored", TqdmWarning, stacklevel=2)
+
    def __del__(self):
        # Best-effort cleanup when the bar is garbage-collected.
        self.close()
+
    def __str__(self):
        """Render the current progress meter as a string."""
        return self.format_meter(**self.format_dict)
+
    @property
    def _comparable(self):
        # Sort key: absolute screen position (fixed positions are stored
        # negative); 1 << 31 if `pos` was never assigned.
        return abs(getattr(self, "pos", 1 << 31))
+
    def __hash__(self):
        # Identity hash: each bar is unique regardless of field values
        # (instances are stored in the class-level `_instances` registry).
        return id(self)
+
    def __iter__(self):
        """Backward-compatibility to use: for x in tqdm(iterable)"""

        # Inlining instance variables as locals (speed optimisation)
        iterable = self.iterable

        # If the bar is disabled, then just walk the iterable
        # (note: keep this check outside the loop for performance)
        if self.disable:
            for obj in iterable:
                yield obj
            return

        mininterval = self.mininterval
        last_print_t = self.last_print_t
        last_print_n = self.last_print_n
        min_start_t = self.start_t + self.delay
        n = self.n
        time = self._time

        try:
            for obj in iterable:
                yield obj
                # Update and possibly print the progressbar.
                # Note: does not call self.update(1) for speed optimisation.
                n += 1

                if n - last_print_n >= self.miniters:
                    cur_t = time()
                    dt = cur_t - last_print_t
                    if dt >= mininterval and cur_t >= min_start_t:
                        # delegate display + miniters adjustment to update()
                        self.update(n - last_print_n)
                        # re-sync locals with state update() just changed
                        last_print_n = self.last_print_n
                        last_print_t = self.last_print_t
        finally:
            # persist the counter and close even if the consumer stops early
            self.n = n
            self.close()
+
    def update(self, n=1):
        """
        Manually update the progress bar, useful for streams
        such as reading files.
        E.g.:
        >>> t = tqdm(total=filesize) # Initialise
        >>> for current_buffer in stream:
        ...     ...
        ...     t.update(len(current_buffer))
        >>> t.close()
        The last line is highly recommended, but possibly not necessary if
        `t.update()` will be called in such a way that `filesize` will be
        exactly reached and printed.

        Parameters
        ----------
        n : int or float, optional
            Increment to add to the internal counter of iterations
            [default: 1]. If using float, consider specifying `{n:.3f}`
            or similar in `bar_format`, or specifying `unit_scale`.

        Returns
        -------
        out : bool or None
            True if a `display()` was triggered.
        """
        if self.disable:
            return

        if n < 0:
            self.last_print_n += n  # for auto-refresh logic to work
        self.n += n

        # check counter first to reduce calls to time()
        if self.n - self.last_print_n >= self.miniters:
            cur_t = self._time()
            dt = cur_t - self.last_print_t
            if dt >= self.mininterval and cur_t >= self.start_t + self.delay:
                # NOTE(review): cur_t is re-read here just after the read
                # above; looks redundant -- confirm upstream intent before
                # simplifying.
                cur_t = self._time()
                dn = self.n - self.last_print_n  # >= n
                if self.smoothing and dt and dn:
                    # EMA (not just overall average)
                    self._ema_dn(dn)
                    self._ema_dt(dt)
                self.refresh(lock_args=self.lock_args)
                if self.dynamic_miniters:
                    # If no `miniters` was specified, adjust automatically to the
                    # maximum iteration rate seen so far between two prints.
                    # e.g.: After running `tqdm.update(5)`, subsequent
                    # calls to `tqdm.update()` will only cause an update after
                    # at least 5 more iterations.
                    if self.maxinterval and dt >= self.maxinterval:
                        self.miniters = dn * (self.mininterval or self.maxinterval) / dt
                    elif self.smoothing:
                        # EMA miniters update
                        self.miniters = self._ema_miniters(
                            dn * (self.mininterval / dt if self.mininterval and dt
                                  else 1))
                    else:
                        # max iters between two prints
                        self.miniters = max(self.miniters, dn)

                # Store old values for next call
                self.last_print_n = self.n
                self.last_print_t = cur_t
                return True
+
    def close(self):
        """Cleanup and (if leave=False) close the progressbar."""
        if self.disable:
            return

        # Prevent multiple closures
        self.disable = True

        # decrement instance pos and remove from internal set
        pos = abs(self.pos)
        self._decr_instances(self)

        if self.last_print_t < self.start_t + self.delay:
            # haven't ever displayed; nothing to clear
            return

        # GUI mode: no screen printer was created, nothing to draw/erase
        if getattr(self, 'sp', None) is None:
            return

        # annoyingly, _supports_unicode isn't good enough
        def fp_write(s):
            self.fp.write(str(s))

        # probe the stream: bail out quietly if it was already closed
        try:
            fp_write('')
        except ValueError as e:
            if 'closed' in str(e):
                return
            raise  # pragma: no cover

        # `leave=None` means "leave only the top-most (pos 0) bar"
        leave = pos == 0 if self.leave is None else self.leave

        with self._lock:
            if leave:
                # stats for overall rate (no weighted average)
                self._ema_dt = lambda: None
                self.display(pos=0)
                fp_write('\n')
            else:
                # clear previous display
                if self.display(msg='', pos=pos) and not pos:
                    fp_write('\r')
+
    def clear(self, nolock=False):
        """Clear current bar display.

        Parameters
        ----------
        nolock : bool, optional
            If `True`, assume the caller already holds the internal lock.
        """
        if self.disable:
            return

        if not nolock:
            self._lock.acquire()
        pos = abs(self.pos)
        # only rows inside the visible window (default 20) need erasing
        if pos < (self.nrows or 20):
            self.moveto(pos)
            self.sp('')
            self.fp.write('\r')  # place cursor back at the beginning of line
            self.moveto(-pos)
        if not nolock:
            self._lock.release()
+
    def refresh(self, nolock=False, lock_args=None):
        """
        Force refresh the display of this bar.

        Parameters
        ----------
        nolock : bool, optional
            If `True`, does not lock.
            If [default: `False`]: calls `acquire()` on internal lock.
        lock_args : tuple, optional
            Passed to internal lock's `acquire()`.
            If specified, will only `display()` if `acquire()` returns `True`.

        Returns
        -------
        out : bool or None
            True if displayed; False if a non-blocking `acquire()` failed;
            None if the bar is disabled.
        """
        if self.disable:
            return

        if not nolock:
            if lock_args:
                # non-blocking acquire: skip this refresh rather than stall
                if not self._lock.acquire(*lock_args):
                    return False
            else:
                self._lock.acquire()
        self.display()
        if not nolock:
            self._lock.release()
        return True
+
+ def unpause(self):
+ """Restart tqdm timer from last print time."""
+ if self.disable:
+ return
+ cur_t = self._time()
+ self.start_t += cur_t - self.last_print_t
+ self.last_print_t = cur_t
+
+ def reset(self, total=None):
+ """
+ Resets to 0 iterations for repeated use.
+
+ Consider combining with `leave=True`.
+
+ Parameters
+ ----------
+ total : int or float, optional. Total to use for the new bar.
+ """
+ self.n = 0
+ if total is not None:
+ self.total = total
+ if self.disable:
+ return
+ self.last_print_n = 0
+ self.last_print_t = self.start_t = self._time()
+ self._ema_dn = EMA(self.smoothing)
+ self._ema_dt = EMA(self.smoothing)
+ self._ema_miniters = EMA(self.smoothing)
+ self.refresh()
+
+ def set_description(self, desc=None, refresh=True):
+ """
+ Set/modify description of the progress bar.
+
+ Parameters
+ ----------
+ desc : str, optional
+ refresh : bool, optional
+ Forces refresh [default: True].
+ """
+ self.desc = desc + ': ' if desc else ''
+ if refresh:
+ self.refresh()
+
+ def set_description_str(self, desc=None, refresh=True):
+ """Set/modify description without ': ' appended."""
+ self.desc = desc or ''
+ if refresh:
+ self.refresh()
+
+ def set_postfix(self, ordered_dict=None, refresh=True, **kwargs):
+ """
+ Set/modify postfix (additional stats)
+ with automatic formatting based on datatype.
+
+ Parameters
+ ----------
+ ordered_dict : dict or OrderedDict, optional
+ refresh : bool, optional
+ Forces refresh [default: True].
+ kwargs : dict, optional
+ """
+ # Sort in alphabetical order to be more deterministic
+ postfix = OrderedDict([] if ordered_dict is None else ordered_dict)
+ for key in sorted(kwargs.keys()):
+ postfix[key] = kwargs[key]
+ # Preprocess stats according to datatype
+ for key in postfix.keys():
+ # Number: limit the length of the string
+ if isinstance(postfix[key], Number):
+ postfix[key] = self.format_num(postfix[key])
+ # Else for any other type, try to get the string conversion
+ elif not isinstance(postfix[key], str):
+ postfix[key] = str(postfix[key])
+ # Else if it's a string, don't need to preprocess anything
+ # Stitch together to get the final postfix
+ self.postfix = ', '.join(key + '=' + postfix[key].strip()
+ for key in postfix.keys())
+ if refresh:
+ self.refresh()
+
+ def set_postfix_str(self, s='', refresh=True):
+ """
+ Postfix without dictionary expansion, similar to prefix handling.
+ """
+ self.postfix = str(s)
+ if refresh:
+ self.refresh()
+
    def moveto(self, n):
        # TODO: private method
        # Move the cursor `n` lines down (positive n writes newlines) or up
        # (negative n repeats the terminal move-up sequence) so bars can be
        # drawn at fixed screen rows.
        self.fp.write('\n' * n + _term_move_up() * -n)
        getattr(self.fp, 'flush', lambda: None)()  # flush if supported
+
    @property
    def format_dict(self):
        """Public API for read-only member access."""
        # Disabled before attributes were fully stored: return a defaultdict
        # so downstream formatting lookups never raise KeyError.
        if self.disable and not hasattr(self, 'unit'):
            return defaultdict(lambda: None, {
                'n': self.n, 'total': self.total, 'elapsed': 0, 'unit': 'it'})
        if self.dynamic_ncols:
            # re-measure the screen on every access (window may be resized)
            self.ncols, self.nrows = self.dynamic_ncols(self.fp)
        return {
            'n': self.n, 'total': self.total,
            'elapsed': self._time() - self.start_t if hasattr(self, 'start_t') else 0,
            'ncols': self.ncols, 'nrows': self.nrows, 'prefix': self.desc,
            'ascii': self.ascii, 'unit': self.unit, 'unit_scale': self.unit_scale,
            'rate': self._ema_dn() / self._ema_dt() if self._ema_dt() else None,
            'bar_format': self.bar_format, 'postfix': self.postfix,
            'unit_divisor': self.unit_divisor, 'initial': self.initial,
            'colour': self.colour}
+
    def display(self, msg=None, pos=None):
        """
        Use `self.sp` to display `msg` in the specified `pos`.

        Consider overloading this function when inheriting to use e.g.:
        `self.some_frontend(**self.format_dict)` instead of `self.sp`.

        Parameters
        ----------
        msg : str, optional. What to display (default: `repr(self)`).
        pos : int, optional. Position to `moveto`
            (default: `abs(self.pos)`).
        """
        if pos is None:
            pos = abs(self.pos)

        nrows = self.nrows or 20
        if pos >= nrows - 1:
            # bar falls outside the visible window: drop it entirely, or
            # collapse the last visible row into a placeholder message
            if pos >= nrows:
                return False
            if msg or msg is None:  # override at `nrows - 1`
                msg = " ... (more hidden) ..."

        if not hasattr(self, "sp"):
            raise TqdmDeprecationWarning(
                "Please use `tqdm.gui.tqdm(...)`"
                " instead of `tqdm(..., gui=True)`\n",
                fp_write=getattr(self.fp, 'write', sys.stderr.write))

        if pos:
            self.moveto(pos)
        self.sp(self.__str__() if msg is None else msg)
        if pos:
            self.moveto(-pos)
        return True
+
    @classmethod
    @contextmanager
    def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs):
        """
        stream  : file-like object.
        method  : str, "read" or "write". The result of `read()` and
            the first argument of `write()` should have a `len()`.
        bytes   : bool, optional. If True (default), configure the bar
            with byte-stream units (B, scaled, divisor 1024).

        >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj:
        ...     while True:
        ...         chunk = fobj.read(chunk_size)
        ...         if not chunk:
        ...             break
        """
        with cls(total=total, **tqdm_kwargs) as t:
            if bytes:
                t.unit = "B"
                t.unit_scale = True
                t.unit_divisor = 1024
            # proxy whose read()/write() also advances the bar via t.update
            yield CallbackIOWrapper(t.update, stream, method)
+
+
def trange(*args, **kwargs):
    """Shortcut for `tqdm(range(*args), **kwargs)`."""
    return tqdm(range(*args), **kwargs)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/tk.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/tk.py"
new file mode 100644
index 0000000..788303c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/tk.py"
@@ -0,0 +1,196 @@
+"""
+Tkinter GUI progressbar decorator for iterators.
+
+Usage:
+>>> from tqdm.tk import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+import re
+import sys
+import tkinter
+import tkinter.ttk as ttk
+from warnings import warn
+
+from .std import TqdmExperimentalWarning, TqdmWarning
+from .std import tqdm as std_tqdm
+
+__author__ = {"github.com/": ["richardsheridan", "casperdcl"]}
+__all__ = ['tqdm_tk', 'ttkrange', 'tqdm', 'trange']
+
+
class tqdm_tk(std_tqdm):  # pragma: no cover
    """
    Experimental Tkinter GUI version of tqdm!

    Note: Window interactivity suffers if `tqdm_tk` is not running within
    a Tkinter mainloop and values are generated infrequently. In this case,
    consider calling `tqdm_tk.refresh()` frequently in the Tk thread.
    """

    # TODO: @classmethod: write()?

    def __init__(self, *args, **kwargs):
        """
        This class accepts the following parameters *in addition* to
        the parameters accepted by `tqdm`.

        Parameters
        ----------
        grab : bool, optional
            Grab the input across all windows of the process.
        tk_parent : `tkinter.Wm`, optional
            Parent Tk window.
        cancel_callback : Callable, optional
            Create a cancel button and set `cancel_callback` to be called
            when the cancel or window close button is clicked.
        """
        kwargs = kwargs.copy()
        kwargs['gui'] = True
        # convert disable = None to False
        kwargs['disable'] = bool(kwargs.get('disable', False))
        self._warn_leave = 'leave' in kwargs
        # pop GUI-only kwargs before handing the rest to std_tqdm
        grab = kwargs.pop('grab', False)
        tk_parent = kwargs.pop('tk_parent', None)
        self._cancel_callback = kwargs.pop('cancel_callback', None)
        super().__init__(*args, **kwargs)

        if self.disable:
            return

        if tk_parent is None:  # Discover parent widget
            try:
                tk_parent = tkinter._default_root
            except AttributeError:
                raise AttributeError(
                    "`tk_parent` required when using `tkinter.NoDefaultRoot()`")
            if tk_parent is None:  # use new default root window as display
                self._tk_window = tkinter.Tk()
            else:  # some other windows already exist
                self._tk_window = tkinter.Toplevel()
        else:
            self._tk_window = tkinter.Toplevel(tk_parent)

        warn("GUI is experimental/alpha", TqdmExperimentalWarning, stacklevel=2)
        self._tk_dispatching = self._tk_dispatching_helper()

        # Build the window: text label above an (in)determinate progressbar,
        # with an optional Cancel button below.
        self._tk_window.protocol("WM_DELETE_WINDOW", self.cancel)
        self._tk_window.wm_title(self.desc)
        # briefly raise the window to the top, then release topmost status
        self._tk_window.wm_attributes("-topmost", 1)
        self._tk_window.after(0, lambda: self._tk_window.wm_attributes("-topmost", 0))
        self._tk_n_var = tkinter.DoubleVar(self._tk_window, value=0)
        self._tk_text_var = tkinter.StringVar(self._tk_window)
        pbar_frame = ttk.Frame(self._tk_window, padding=5)
        pbar_frame.pack()
        _tk_label = ttk.Label(pbar_frame, textvariable=self._tk_text_var,
                              wraplength=600, anchor="center", justify="center")
        _tk_label.pack()
        self._tk_pbar = ttk.Progressbar(
            pbar_frame, variable=self._tk_n_var, length=450)
        if self.total is not None:
            self._tk_pbar.configure(maximum=self.total)
        else:
            # unknown total: show an animated indeterminate bar
            self._tk_pbar.configure(mode="indeterminate")
        self._tk_pbar.pack()
        if self._cancel_callback is not None:
            _tk_button = ttk.Button(pbar_frame, text="Cancel", command=self.cancel)
            _tk_button.pack()
        if grab:
            self._tk_window.grab_set()

    def close(self):
        """Tear down the Tk window (immediately, unless `leave` keeps it)."""
        if self.disable:
            return

        self.disable = True

        with self.get_lock():
            self._instances.remove(self)

        def _close():
            # destroy on next idle; force a dispatch if no mainloop runs
            self._tk_window.after('idle', self._tk_window.destroy)
            if not self._tk_dispatching:
                self._tk_window.update()

        self._tk_window.protocol("WM_DELETE_WINDOW", _close)

        # if leave is set but we are self-dispatching, the left window is
        # totally unresponsive unless the user manually dispatches
        if not self.leave:
            _close()
        elif not self._tk_dispatching:
            if self._warn_leave:
                warn("leave flag ignored if not in tkinter mainloop",
                     TqdmWarning, stacklevel=2)
            _close()

    def clear(self, *_, **__):
        # no-op: the label is simply overwritten on the next display()
        pass

    def display(self, *_, **__):
        """Push current progress into the Tk variables (label + bar)."""
        self._tk_n_var.set(self.n)
        d = self.format_dict
        # remove {bar}
        d['bar_format'] = (d['bar_format'] or "{l_bar}<bar/>{r_bar}").replace(
            "{bar}", "<bar/>")
        msg = self.format_meter(**d)
        if '<bar/>' in msg:
            # drop the placeholder (and its surrounding pipes) from the text
            msg = "".join(re.split(r'\|?<bar/>\|?', msg, maxsplit=1))
        self._tk_text_var.set(msg)
        if not self._tk_dispatching:
            self._tk_window.update()

    def set_description(self, desc=None, refresh=True):
        self.set_description_str(desc, refresh)

    def set_description_str(self, desc=None, refresh=True):
        # the description doubles as the window title
        self.desc = desc
        if not self.disable:
            self._tk_window.wm_title(desc)
            if refresh and not self._tk_dispatching:
                self._tk_window.update()

    def cancel(self):
        """
        `cancel_callback()` followed by `close()`
        when close/cancel buttons clicked.
        """
        if self._cancel_callback is not None:
            self._cancel_callback()
        self.close()

    def reset(self, total=None):
        """
        Resets to 0 iterations for repeated use.

        Parameters
        ----------
        total : int or float, optional. Total to use for the new bar.
        """
        if hasattr(self, '_tk_pbar'):
            if total is None:
                self._tk_pbar.configure(maximum=100, mode="indeterminate")
            else:
                self._tk_pbar.configure(maximum=total, mode="determinate")
        super().reset(total=total)

    @staticmethod
    def _tk_dispatching_helper():
        """determine if Tkinter mainloop is dispatching events"""
        # a mainloop frame anywhere on any thread's stack means Tk is
        # already dispatching, so explicit update() calls are unnecessary
        codes = {tkinter.mainloop.__code__, tkinter.Misc.mainloop.__code__}
        for frame in sys._current_frames().values():
            while frame:
                if frame.f_code in codes:
                    return True
                frame = frame.f_back
        return False
+
+
def ttkrange(*args, **kwargs):
    """Shortcut for `tqdm.tk.tqdm(range(*args), **kwargs)`."""
    return tqdm_tk(range(*args), **kwargs)
+
+
# Aliases: make this module a drop-in for `tqdm.std`
# (supports `from tqdm.tk import tqdm, trange`).
tqdm = tqdm_tk
trange = ttkrange
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/tqdm.1" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/tqdm.1"
new file mode 100644
index 0000000..b90ab4b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/tqdm.1"
@@ -0,0 +1,314 @@
+.\" Automatically generated by Pandoc 1.19.2
+.\"
+.TH "TQDM" "1" "2015\-2021" "tqdm User Manuals" ""
+.hy
+.SH NAME
+.PP
+tqdm \- fast, extensible progress bar for Python and CLI
+.SH SYNOPSIS
+.PP
+tqdm [\f[I]options\f[]]
+.SH DESCRIPTION
+.PP
+See <https://github.com/tqdm/tqdm>.
+Can be used as a pipe:
+.IP
+.nf
+\f[C]
+$\ #\ count\ lines\ of\ code
+$\ cat\ *.py\ |\ tqdm\ |\ wc\ \-l
+327it\ [00:00,\ 981773.38it/s]
+327
+
+$\ #\ find\ all\ files
+$\ find\ .\ \-name\ "*.py"\ |\ tqdm\ |\ wc\ \-l
+432it\ [00:00,\ 833842.30it/s]
+432
+
+#\ ...\ and\ more\ info
+$\ find\ .\ \-name\ \[aq]*.py\[aq]\ \-exec\ wc\ \-l\ \\{}\ \\;\ \\
+\ \ |\ tqdm\ \-\-total\ 432\ \-\-unit\ files\ \-\-desc\ counting\ \\
+\ \ |\ awk\ \[aq]{\ sum\ +=\ $1\ };\ END\ {\ print\ sum\ }\[aq]
+counting:\ 100%|█████████|\ 432/432\ [00:00<00:00,\ 794361.83files/s]
+131998
+\f[]
+.fi
+.SH OPTIONS
+.TP
+.B \-h, \-\-help
+Print this help and exit.
+.RS
+.RE
+.TP
+.B \-v, \-\-version
+Print version and exit.
+.RS
+.RE
+.TP
+.B \-\-desc=\f[I]desc\f[]
+str, optional.
+Prefix for the progressbar.
+.RS
+.RE
+.TP
+.B \-\-total=\f[I]total\f[]
+int or float, optional.
+The number of expected iterations.
+If unspecified, len(iterable) is used if possible.
+If float("inf") or as a last resort, only basic progress statistics are
+displayed (no ETA, no progressbar).
+If \f[C]gui\f[] is True and this parameter needs subsequent updating,
+specify an initial arbitrary large positive number, e.g.
+9e9.
+.RS
+.RE
+.TP
+.B \-\-leave
+bool, optional.
+If [default: True], keeps all traces of the progressbar upon termination
+of iteration.
+If \f[C]None\f[], will leave only if \f[C]position\f[] is \f[C]0\f[].
+.RS
+.RE
+.TP
+.B \-\-ncols=\f[I]ncols\f[]
+int, optional.
+The width of the entire output message.
+If specified, dynamically resizes the progressbar to stay within this
+bound.
+If unspecified, attempts to use environment width.
+The fallback is a meter width of 10 and no limit for the counter and
+statistics.
+If 0, will not print any meter (only stats).
+.RS
+.RE
+.TP
+.B \-\-mininterval=\f[I]mininterval\f[]
+float, optional.
+Minimum progress display update interval [default: 0.1] seconds.
+.RS
+.RE
+.TP
+.B \-\-maxinterval=\f[I]maxinterval\f[]
+float, optional.
+Maximum progress display update interval [default: 10] seconds.
+Automatically adjusts \f[C]miniters\f[] to correspond to
+\f[C]mininterval\f[] after long display update lag.
+Only works if \f[C]dynamic_miniters\f[] or monitor thread is enabled.
+.RS
+.RE
+.TP
+.B \-\-miniters=\f[I]miniters\f[]
+int or float, optional.
+Minimum progress display update interval, in iterations.
+If 0 and \f[C]dynamic_miniters\f[], will automatically adjust to equal
+\f[C]mininterval\f[] (more CPU efficient, good for tight loops).
+If > 0, will skip display of specified number of iterations.
+Tweak this and \f[C]mininterval\f[] to get very efficient loops.
+If your progress is erratic with both fast and slow iterations (network,
+skipping items, etc) you should set miniters=1.
+.RS
+.RE
+.TP
+.B \-\-ascii=\f[I]ascii\f[]
+bool or str, optional.
+If unspecified or False, use unicode (smooth blocks) to fill the meter.
+The fallback is to use ASCII characters " 123456789#".
+.RS
+.RE
+.TP
+.B \-\-disable
+bool, optional.
+Whether to disable the entire progressbar wrapper [default: False].
+If set to None, disable on non\-TTY.
+.RS
+.RE
+.TP
+.B \-\-unit=\f[I]unit\f[]
+str, optional.
+String that will be used to define the unit of each iteration [default:
+it].
+.RS
+.RE
+.TP
+.B \-\-unit\-scale=\f[I]unit_scale\f[]
+bool or int or float, optional.
+If 1 or True, the number of iterations will be reduced/scaled
+automatically and a metric prefix following the International System of
+Units standard will be added (kilo, mega, etc.) [default: False].
+If any other non\-zero number, will scale \f[C]total\f[] and \f[C]n\f[].
+.RS
+.RE
+.TP
+.B \-\-dynamic\-ncols
+bool, optional.
+If set, constantly alters \f[C]ncols\f[] and \f[C]nrows\f[] to the
+environment (allowing for window resizes) [default: False].
+.RS
+.RE
+.TP
+.B \-\-smoothing=\f[I]smoothing\f[]
+float, optional.
+Exponential moving average smoothing factor for speed estimates (ignored
+in GUI mode).
+Ranges from 0 (average speed) to 1 (current/instantaneous speed)
+[default: 0.3].
+.RS
+.RE
+.TP
+.B \-\-bar\-format=\f[I]bar_format\f[]
+str, optional.
+Specify a custom bar string formatting.
+May impact performance.
+[default: \[aq]{l_bar}{bar}{r_bar}\[aq]], where l_bar=\[aq]{desc}:
+{percentage:3.0f}%|\[aq] and r_bar=\[aq]| {n_fmt}/{total_fmt}
+[{elapsed}<{remaining}, \[aq] \[aq]{rate_fmt}{postfix}]\[aq] Possible
+vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, percentage,
+elapsed, elapsed_s, ncols, nrows, desc, unit, rate, rate_fmt,
+rate_noinv, rate_noinv_fmt, rate_inv, rate_inv_fmt, postfix,
+unit_divisor, remaining, remaining_s, eta.
+Note that a trailing ": " is automatically removed after {desc} if the
+latter is empty.
+.RS
+.RE
+.TP
+.B \-\-initial=\f[I]initial\f[]
+int or float, optional.
+The initial counter value.
+Useful when restarting a progress bar [default: 0].
+If using float, consider specifying \f[C]{n:.3f}\f[] or similar in
+\f[C]bar_format\f[], or specifying \f[C]unit_scale\f[].
+.RS
+.RE
+.TP
+.B \-\-position=\f[I]position\f[]
+int, optional.
+Specify the line offset to print this bar (starting from 0).
+Automatic if unspecified.
+Useful to manage multiple bars at once (eg, from threads).
+.RS
+.RE
+.TP
+.B \-\-postfix=\f[I]postfix\f[]
+dict or *, optional.
+Specify additional stats to display at the end of the bar.
+Calls \f[C]set_postfix(**postfix)\f[] if possible (dict).
+.RS
+.RE
+.TP
+.B \-\-unit\-divisor=\f[I]unit_divisor\f[]
+float, optional.
+[default: 1000], ignored unless \f[C]unit_scale\f[] is True.
+.RS
+.RE
+.TP
+.B \-\-write\-bytes
+bool, optional.
+Whether to write bytes.
+If (default: False) will write unicode.
+.RS
+.RE
+.TP
+.B \-\-lock\-args=\f[I]lock_args\f[]
+tuple, optional.
+Passed to \f[C]refresh\f[] for intermediate output (initialisation,
+iterating, and updating).
+.RS
+.RE
+.TP
+.B \-\-nrows=\f[I]nrows\f[]
+int, optional.
+The screen height.
+If specified, hides nested bars outside this bound.
+If unspecified, attempts to use environment height.
+The fallback is 20.
+.RS
+.RE
+.TP
+.B \-\-colour=\f[I]colour\f[]
+str, optional.
+Bar colour (e.g.
+\[aq]green\[aq], \[aq]#00ff00\[aq]).
+.RS
+.RE
+.TP
+.B \-\-delay=\f[I]delay\f[]
+float, optional.
+Don\[aq]t display until [default: 0] seconds have elapsed.
+.RS
+.RE
+.TP
+.B \-\-delim=\f[I]delim\f[]
+chr, optional.
+Delimiting character [default: \[aq]\\n\[aq]].
+Use \[aq]\\0\[aq] for null.
+N.B.: on Windows systems, Python converts \[aq]\\n\[aq] to
+\[aq]\\r\\n\[aq].
+.RS
+.RE
+.TP
+.B \-\-buf\-size=\f[I]buf_size\f[]
+int, optional.
+String buffer size in bytes [default: 256] used when \f[C]delim\f[] is
+specified.
+.RS
+.RE
+.TP
+.B \-\-bytes
+bool, optional.
+If true, will count bytes, ignore \f[C]delim\f[], and default
+\f[C]unit_scale\f[] to True, \f[C]unit_divisor\f[] to 1024, and
+\f[C]unit\f[] to \[aq]B\[aq].
+.RS
+.RE
+.TP
+.B \-\-tee
+bool, optional.
+If true, passes \f[C]stdin\f[] to both \f[C]stderr\f[] and
+\f[C]stdout\f[].
+.RS
+.RE
+.TP
+.B \-\-update
+bool, optional.
+If true, will treat input as newly elapsed iterations, i.e.
+numbers to pass to \f[C]update()\f[].
+Note that this is slow (~2e5 it/s) since every input must be decoded as
+a number.
+.RS
+.RE
+.TP
+.B \-\-update\-to
+bool, optional.
+If true, will treat input as total elapsed iterations, i.e.
+numbers to assign to \f[C]self.n\f[].
+Note that this is slow (~2e5 it/s) since every input must be decoded as
+a number.
+.RS
+.RE
+.TP
+.B \-\-null
+bool, optional.
+If true, will discard input (no stdout).
+.RS
+.RE
+.TP
+.B \-\-manpath=\f[I]manpath\f[]
+str, optional.
+Directory in which to install tqdm man pages.
+.RS
+.RE
+.TP
+.B \-\-comppath=\f[I]comppath\f[]
+str, optional.
+Directory in which to place tqdm completion.
+.RS
+.RE
+.TP
+.B \-\-log=\f[I]log\f[]
+str, optional.
+CRITICAL|FATAL|ERROR|WARN(ING)|[default: \[aq]INFO\[aq]]|DEBUG|NOTSET.
+.RS
+.RE
+.SH AUTHORS
+tqdm developers <https://github.com/tqdm>.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/utils.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/utils.py"
new file mode 100644
index 0000000..af3ec7d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/utils.py"
@@ -0,0 +1,399 @@
+"""
+General helpers required for `tqdm.std`.
+"""
+import os
+import re
+import sys
+from functools import partial, partialmethod, wraps
+from inspect import signature
+# TODO consider using wcswidth third-party package for 0-width characters
+from unicodedata import east_asian_width
+from warnings import warn
+from weakref import proxy
+
+_range, _unich, _unicode, _basestring = range, chr, str, str
+CUR_OS = sys.platform
+IS_WIN = any(CUR_OS.startswith(i) for i in ['win32', 'cygwin'])
+IS_NIX = any(CUR_OS.startswith(i) for i in ['aix', 'linux', 'darwin', 'freebsd'])
+RE_ANSI = re.compile(r"\x1b\[[;\d]*[A-Za-z]")
+
+try:
+ if IS_WIN:
+ import colorama
+ else:
+ raise ImportError
+except ImportError:
+ colorama = None
+else:
+ try:
+ colorama.init(strip=False)
+ except TypeError:
+ colorama.init()
+
+
+def envwrap(prefix, types=None, is_method=False):
+ """
+ Override parameter defaults via `os.environ[prefix + param_name]`.
+ Maps UPPER_CASE env vars map to lower_case param names.
+ camelCase isn't supported (because Windows ignores case).
+
+ Precedence (highest first):
+
+ - call (`foo(a=3)`)
+ - environ (`FOO_A=2`)
+ - signature (`def foo(a=1)`)
+
+ Parameters
+ ----------
+ prefix : str
+ Env var prefix, e.g. "FOO_"
+ types : dict, optional
+ Fallback mappings `{'param_name': type, ...}` if types cannot be
+ inferred from function signature.
+ Consider using `types=collections.defaultdict(lambda: ast.literal_eval)`.
+ is_method : bool, optional
+ Whether to use `functools.partialmethod`. If (default: False) use `functools.partial`.
+
+ Examples
+ --------
+ ```
+ $ cat foo.py
+ from tqdm.utils import envwrap
+ @envwrap("FOO_")
+ def test(a=1, b=2, c=3):
+ print(f"received: a={a}, b={b}, c={c}")
+
+ $ FOO_A=42 FOO_C=1337 python -c 'import foo; foo.test(c=99)'
+ received: a=42, b=2, c=99
+ ```
+ """
+ if types is None:
+ types = {}
+ i = len(prefix)
+ env_overrides = {k[i:].lower(): v for k, v in os.environ.items() if k.startswith(prefix)}
+ part = partialmethod if is_method else partial
+
+ def wrap(func):
+ params = signature(func).parameters
+ # ignore unknown env vars
+ overrides = {k: v for k, v in env_overrides.items() if k in params}
+ # infer overrides' `type`s
+ for k in overrides:
+ param = params[k]
+ if param.annotation is not param.empty: # typehints
+ for typ in getattr(param.annotation, '__args__', (param.annotation,)):
+ try:
+ overrides[k] = typ(overrides[k])
+ except Exception:
+ pass
+ else:
+ break
+ elif param.default is not None: # type of default value
+ overrides[k] = type(param.default)(overrides[k])
+ else:
+ try: # `types` fallback
+ overrides[k] = types[k](overrides[k])
+ except KeyError: # keep unconverted (`str`)
+ pass
+ return part(func, **overrides)
+ return wrap
+
+
+class FormatReplace(object):
+ """
+ >>> a = FormatReplace('something')
+ >>> f"{a:5d}"
+ 'something'
+ """ # NOQA: P102
+ def __init__(self, replace=''):
+ self.replace = replace
+ self.format_called = 0
+
+ def __format__(self, _):
+ self.format_called += 1
+ return self.replace
+
+
+class Comparable(object):
+ """Assumes child has self._comparable attr/@property"""
+ def __lt__(self, other):
+ return self._comparable < other._comparable
+
+ def __le__(self, other):
+ return (self < other) or (self == other)
+
+ def __eq__(self, other):
+ return self._comparable == other._comparable
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __gt__(self, other):
+ return not self <= other
+
+ def __ge__(self, other):
+ return not self < other
+
+
+class ObjectWrapper(object):
+ def __getattr__(self, name):
+ return getattr(self._wrapped, name)
+
+ def __setattr__(self, name, value):
+ return setattr(self._wrapped, name, value)
+
+ def wrapper_getattr(self, name):
+ """Actual `self.getattr` rather than self._wrapped.getattr"""
+ try:
+ return object.__getattr__(self, name)
+ except AttributeError: # py2
+ return getattr(self, name)
+
+ def wrapper_setattr(self, name, value):
+ """Actual `self.setattr` rather than self._wrapped.setattr"""
+ return object.__setattr__(self, name, value)
+
+ def __init__(self, wrapped):
+ """
+ Thin wrapper around a given object
+ """
+ self.wrapper_setattr('_wrapped', wrapped)
+
+
+class SimpleTextIOWrapper(ObjectWrapper):
+ """
+ Change only `.write()` of the wrapped object by encoding the passed
+ value and passing the result to the wrapped object's `.write()` method.
+ """
+ # pylint: disable=too-few-public-methods
+ def __init__(self, wrapped, encoding):
+ super().__init__(wrapped)
+ self.wrapper_setattr('encoding', encoding)
+
+ def write(self, s):
+ """
+ Encode `s` and pass to the wrapped object's `.write()` method.
+ """
+ return self._wrapped.write(s.encode(self.wrapper_getattr('encoding')))
+
+ def __eq__(self, other):
+ return self._wrapped == getattr(other, '_wrapped', other)
+
+
+class DisableOnWriteError(ObjectWrapper):
+ """
+ Disable the given `tqdm_instance` upon `write()` or `flush()` errors.
+ """
+ @staticmethod
+ def disable_on_exception(tqdm_instance, func):
+ """
+ Quietly set `tqdm_instance.miniters=inf` if `func` raises `errno=5`.
+ """
+ tqdm_instance = proxy(tqdm_instance)
+
+ def inner(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except OSError as e:
+ if e.errno != 5:
+ raise
+ try:
+ tqdm_instance.miniters = float('inf')
+ except ReferenceError:
+ pass
+ except ValueError as e:
+ if 'closed' not in str(e):
+ raise
+ try:
+ tqdm_instance.miniters = float('inf')
+ except ReferenceError:
+ pass
+ return inner
+
+ def __init__(self, wrapped, tqdm_instance):
+ super().__init__(wrapped)
+ if hasattr(wrapped, 'write'):
+ self.wrapper_setattr(
+ 'write', self.disable_on_exception(tqdm_instance, wrapped.write))
+ if hasattr(wrapped, 'flush'):
+ self.wrapper_setattr(
+ 'flush', self.disable_on_exception(tqdm_instance, wrapped.flush))
+
+ def __eq__(self, other):
+ return self._wrapped == getattr(other, '_wrapped', other)
+
+
+class CallbackIOWrapper(ObjectWrapper):
+ def __init__(self, callback, stream, method="read"):
+ """
+ Wrap a given `file`-like object's `read()` or `write()` to report
+ lengths to the given `callback`
+ """
+ super().__init__(stream)
+ func = getattr(stream, method)
+ if method == "write":
+ @wraps(func)
+ def write(data, *args, **kwargs):
+ res = func(data, *args, **kwargs)
+ callback(len(data))
+ return res
+ self.wrapper_setattr('write', write)
+ elif method == "read":
+ @wraps(func)
+ def read(*args, **kwargs):
+ data = func(*args, **kwargs)
+ callback(len(data))
+ return data
+ self.wrapper_setattr('read', read)
+ else:
+ raise KeyError("Can only wrap read/write methods")
+
+
+def _is_utf(encoding):
+ try:
+ u'\u2588\u2589'.encode(encoding)
+ except UnicodeEncodeError:
+ return False
+ except Exception:
+ try:
+ return encoding.lower().startswith('utf-') or ('U8' == encoding)
+ except Exception:
+ return False
+ else:
+ return True
+
+
+def _supports_unicode(fp):
+ try:
+ return _is_utf(fp.encoding)
+ except AttributeError:
+ return False
+
+
+def _is_ascii(s):
+ if isinstance(s, str):
+ for c in s:
+ if ord(c) > 255:
+ return False
+ return True
+ return _supports_unicode(s)
+
+
+def _screen_shape_wrapper(): # pragma: no cover
+ """
+ Return a function which returns console dimensions (width, height).
+ Supported: linux, osx, windows, cygwin.
+ """
+ _screen_shape = None
+ if IS_WIN:
+ _screen_shape = _screen_shape_windows
+ if _screen_shape is None:
+ _screen_shape = _screen_shape_tput
+ if IS_NIX:
+ _screen_shape = _screen_shape_linux
+ return _screen_shape
+
+
+def _screen_shape_windows(fp): # pragma: no cover
+ try:
+ import struct
+ from ctypes import create_string_buffer, windll
+ from sys import stdin, stdout
+
+ io_handle = -12 # assume stderr
+ if fp == stdin:
+ io_handle = -10
+ elif fp == stdout:
+ io_handle = -11
+
+ h = windll.kernel32.GetStdHandle(io_handle)
+ csbi = create_string_buffer(22)
+ res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
+ if res:
+ (_bufx, _bufy, _curx, _cury, _wattr, left, top, right, bottom,
+ _maxx, _maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
+ return right - left, bottom - top # +1
+ except Exception: # nosec
+ pass
+ return None, None
+
+
+def _screen_shape_tput(*_): # pragma: no cover
+ """cygwin xterm (windows)"""
+ try:
+ import shlex
+ from subprocess import check_call # nosec
+ return [int(check_call(shlex.split('tput ' + i))) - 1
+ for i in ('cols', 'lines')]
+ except Exception: # nosec
+ pass
+ return None, None
+
+
+def _screen_shape_linux(fp): # pragma: no cover
+
+ try:
+ from array import array
+ from fcntl import ioctl
+ from termios import TIOCGWINSZ
+ except ImportError:
+ return None, None
+ else:
+ try:
+ rows, cols = array('h', ioctl(fp, TIOCGWINSZ, '\0' * 8))[:2]
+ return cols, rows
+ except Exception:
+ try:
+ return [int(os.environ[i]) - 1 for i in ("COLUMNS", "LINES")]
+ except (KeyError, ValueError):
+ return None, None
+
+
+def _environ_cols_wrapper(): # pragma: no cover
+ """
+ Return a function which returns console width.
+ Supported: linux, osx, windows, cygwin.
+ """
+ warn("Use `_screen_shape_wrapper()(file)[0]` instead of"
+ " `_environ_cols_wrapper()(file)`", DeprecationWarning, stacklevel=2)
+ shape = _screen_shape_wrapper()
+ if not shape:
+ return None
+
+ @wraps(shape)
+ def inner(fp):
+ return shape(fp)[0]
+
+ return inner
+
+
+def _term_move_up(): # pragma: no cover
+ return '' if (os.name == 'nt') and (colorama is None) else '\x1b[A'
+
+
+def _text_width(s):
+ return sum(2 if east_asian_width(ch) in 'FW' else 1 for ch in str(s))
+
+
+def disp_len(data):
+ """
+ Returns the real on-screen length of a string which may contain
+ ANSI control codes and wide chars.
+ """
+ return _text_width(RE_ANSI.sub('', data))
+
+
+def disp_trim(data, length):
+ """
+ Trim a string which may contain ANSI control characters.
+ """
+ if len(data) == disp_len(data):
+ return data[:length]
+
+ ansi_present = bool(RE_ANSI.search(data))
+ while disp_len(data) > length: # carefully delete one char at a time
+ data = data[:-1]
+ if ansi_present and bool(RE_ANSI.search(data)):
+ # assume ANSI reset is required
+ return data if data.endswith("\033[0m") else data + "\033[0m"
+ return data
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/version.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/version.py"
new file mode 100644
index 0000000..11cbaea
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/tqdm/version.py"
@@ -0,0 +1,9 @@
+"""`tqdm` version detector. Precedence: installed dist, git, 'UNKNOWN'."""
+try:
+ from ._dist_ver import __version__
+except ImportError:
+ try:
+ from setuptools_scm import get_version
+ __version__ = get_version(root='..', relative_to=__file__)
+ except (ImportError, LookupError):
+ __version__ = "UNKNOWN"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/METADATA"
new file mode 100644
index 0000000..b09cb50
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/METADATA"
@@ -0,0 +1,72 @@
+Metadata-Version: 2.4
+Name: typing_extensions
+Version: 4.15.0
+Summary: Backported and Experimental Type Hints for Python 3.9+
+Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
+Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" <levkivskyi@gmail.com>
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-Expression: PSF-2.0
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Software Development
+License-File: LICENSE
+Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
+Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://typing-extensions.readthedocs.io/
+Project-URL: Home, https://github.com/python/typing_extensions
+Project-URL: Q & A, https://github.com/python/typing/discussions
+Project-URL: Repository, https://github.com/python/typing_extensions
+
+# Typing Extensions
+
+[](https://gitter.im/python/typing)
+
+[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
+[PyPI](https://pypi.org/project/typing-extensions/)
+
+## Overview
+
+The `typing_extensions` module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+ `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
+ users on previous Python versions to use it too.
+- Enable experimentation with new type system PEPs before they are accepted and
+ added to the `typing` module.
+
+`typing_extensions` is treated specially by static type checkers such as
+mypy and pyright. Objects defined in `typing_extensions` are treated the same
+way as equivalent forms in `typing`.
+
+`typing_extensions` uses
+[Semantic Versioning](https://semver.org/). The
+major version will be incremented only for backwards-incompatible changes.
+Therefore, it's safe to depend
+on `typing_extensions` like this: `typing_extensions ~=x.y`,
+where `x.y` is the first version that includes all features you need.
+[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release)
+is equivalent to `typing_extensions >=x.y, <(x+1)`. Do not depend on `~= x.y.z`
+unless you really know what you're doing; that defeats the purpose of
+semantic versioning.
+
+## Included items
+
+See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
+complete listing of module contents.
+
+## Contributing
+
+See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)
+for how to contribute to `typing_extensions`.
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/RECORD"
new file mode 100644
index 0000000..5cbf8e7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/RECORD"
@@ -0,0 +1,7 @@
+__pycache__/typing_extensions.cpython-312.pyc,,
+typing_extensions-4.15.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+typing_extensions-4.15.0.dist-info/METADATA,sha256=wTg3j-jxiTSsmd4GBTXFPsbBOu7WXpTDJkHafuMZKnI,3259
+typing_extensions-4.15.0.dist-info/RECORD,,
+typing_extensions-4.15.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+typing_extensions-4.15.0.dist-info/licenses/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
+typing_extensions.py,sha256=Qz0R0XDTok0usGXrwb_oSM6n49fOaFZ6tSvqLUwvftg,160429
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/WHEEL"
new file mode 100644
index 0000000..d8b9936
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/WHEEL"
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.12.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..f26bcf4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE"
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation. In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions.py"
new file mode 100644
index 0000000..77f33e1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/typing_extensions.py"
@@ -0,0 +1,4317 @@
+import abc
+import builtins
+import collections
+import collections.abc
+import contextlib
+import enum
+import functools
+import inspect
+import io
+import keyword
+import operator
+import sys
+import types as _types
+import typing
+import warnings
+
+# Breakpoint: https://github.com/python/cpython/pull/119891
+if sys.version_info >= (3, 14):
+ import annotationlib
+
+__all__ = [
+ # Super-special typing primitives.
+ 'Any',
+ 'ClassVar',
+ 'Concatenate',
+ 'Final',
+ 'LiteralString',
+ 'ParamSpec',
+ 'ParamSpecArgs',
+ 'ParamSpecKwargs',
+ 'Self',
+ 'Type',
+ 'TypeVar',
+ 'TypeVarTuple',
+ 'Unpack',
+
+ # ABCs (from collections.abc).
+ 'Awaitable',
+ 'AsyncIterator',
+ 'AsyncIterable',
+ 'Coroutine',
+ 'AsyncGenerator',
+ 'AsyncContextManager',
+ 'Buffer',
+ 'ChainMap',
+
+ # Concrete collection types.
+ 'ContextManager',
+ 'Counter',
+ 'Deque',
+ 'DefaultDict',
+ 'NamedTuple',
+ 'OrderedDict',
+ 'TypedDict',
+
+ # Structural checks, a.k.a. protocols.
+ 'SupportsAbs',
+ 'SupportsBytes',
+ 'SupportsComplex',
+ 'SupportsFloat',
+ 'SupportsIndex',
+ 'SupportsInt',
+ 'SupportsRound',
+ 'Reader',
+ 'Writer',
+
+ # One-off things.
+ 'Annotated',
+ 'assert_never',
+ 'assert_type',
+ 'clear_overloads',
+ 'dataclass_transform',
+ 'deprecated',
+ 'disjoint_base',
+ 'Doc',
+ 'evaluate_forward_ref',
+ 'get_overloads',
+ 'final',
+ 'Format',
+ 'get_annotations',
+ 'get_args',
+ 'get_origin',
+ 'get_original_bases',
+ 'get_protocol_members',
+ 'get_type_hints',
+ 'IntVar',
+ 'is_protocol',
+ 'is_typeddict',
+ 'Literal',
+ 'NewType',
+ 'overload',
+ 'override',
+ 'Protocol',
+ 'Sentinel',
+ 'reveal_type',
+ 'runtime',
+ 'runtime_checkable',
+ 'Text',
+ 'TypeAlias',
+ 'TypeAliasType',
+ 'TypeForm',
+ 'TypeGuard',
+ 'TypeIs',
+ 'TYPE_CHECKING',
+ 'type_repr',
+ 'Never',
+ 'NoReturn',
+ 'ReadOnly',
+ 'Required',
+ 'NotRequired',
+ 'NoDefault',
+ 'NoExtraItems',
+
+ # Pure aliases, have always been in typing
+ 'AbstractSet',
+ 'AnyStr',
+ 'BinaryIO',
+ 'Callable',
+ 'Collection',
+ 'Container',
+ 'Dict',
+ 'ForwardRef',
+ 'FrozenSet',
+ 'Generator',
+ 'Generic',
+ 'Hashable',
+ 'IO',
+ 'ItemsView',
+ 'Iterable',
+ 'Iterator',
+ 'KeysView',
+ 'List',
+ 'Mapping',
+ 'MappingView',
+ 'Match',
+ 'MutableMapping',
+ 'MutableSequence',
+ 'MutableSet',
+ 'Optional',
+ 'Pattern',
+ 'Reversible',
+ 'Sequence',
+ 'Set',
+ 'Sized',
+ 'TextIO',
+ 'Tuple',
+ 'Union',
+ 'ValuesView',
+ 'cast',
+ 'no_type_check',
+ 'no_type_check_decorator',
+]
+
+# for backward compatibility
+PEP_560 = True
+GenericMeta = type
+# Breakpoint: https://github.com/python/cpython/pull/116129
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+
+# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
+_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__
+
+# The functions below are modified copies of typing internal helpers.
+# They are needed by _ProtocolMeta and they provide support for PEP 646.
+
+
+class _Sentinel:
+ def __repr__(self):
+ return "<sentinel>"
+
+
+_marker = _Sentinel()
+
+
+# Breakpoint: https://github.com/python/cpython/pull/27342
+if sys.version_info >= (3, 10):
+ def _should_collect_from_parameters(t):
+ return isinstance(
+ t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
+ )
+else:
+ def _should_collect_from_parameters(t):
+ return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
+
+
+NoReturn = typing.NoReturn
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T') # Any type.
+KT = typing.TypeVar('KT') # Key type.
+VT = typing.TypeVar('VT') # Value type.
+T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
+
+
+# Breakpoint: https://github.com/python/cpython/pull/31841
+if sys.version_info >= (3, 11):
+ from typing import Any
+else:
+
+ class _AnyMeta(type):
+ def __instancecheck__(self, obj):
+ if self is Any:
+ raise TypeError("typing_extensions.Any cannot be used with isinstance()")
+ return super().__instancecheck__(obj)
+
+ def __repr__(self):
+ if self is Any:
+ return "typing_extensions.Any"
+ return super().__repr__()
+
+ class Any(metaclass=_AnyMeta):
+ """Special type indicating an unconstrained type.
+ - Any is compatible with every type.
+ - Any assumed to have all methods.
+ - All values assumed to be instances of Any.
+ Note that all the above statements are true from the point of view of
+ static type checkers. At runtime, Any should not be used with instance
+ checks.
+ """
+ def __new__(cls, *args, **kwargs):
+ if cls is Any:
+ raise TypeError("Any cannot be instantiated")
+ return super().__new__(cls, *args, **kwargs)
+
+
+ClassVar = typing.ClassVar
+
+# Vendored from cpython typing._SpecialFrom
+# Having a separate class means that instances will not be rejected by
+# typing._type_check.
+class _SpecialForm(typing._Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
+
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
+
+ def __getattr__(self, item):
+ if item in {'__name__', '__qualname__'}:
+ return self._name
+
+ raise AttributeError(item)
+
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
+
+ def __repr__(self):
+ return f'typing_extensions.{self._name}'
+
+ def __reduce__(self):
+ return self._name
+
+ def __call__(self, *args, **kwds):
+ raise TypeError(f"Cannot instantiate {self!r}")
+
+ def __or__(self, other):
+ return typing.Union[self, other]
+
+ def __ror__(self, other):
+ return typing.Union[other, self]
+
+ def __instancecheck__(self, obj):
+ raise TypeError(f"{self} cannot be used with isinstance()")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError(f"{self} cannot be used with issubclass()")
+
+ @typing._tp_cache
+ def __getitem__(self, parameters):
+ return self._getitem(self, parameters)
+
+
+# Note that inheriting from this class means that the object will be
+# rejected by typing._type_check, so do not use it if the special form
+# is arguably valid as a type by itself.
+class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+
+Final = typing.Final
+
+# Breakpoint: https://github.com/python/cpython/pull/30530
+if sys.version_info >= (3, 11):
+ final = typing.final
+else:
+ # @final exists in 3.8+, but we backport it for all versions
+ # before 3.11 to keep support for the __final__ attribute.
+ # See https://bugs.python.org/issue46342
+ def final(f):
+ """This decorator can be used to indicate to type checkers that
+ the decorated method cannot be overridden, and decorated class
+ cannot be subclassed. For example:
+
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
+
+ There is no runtime checking of these properties. The decorator
+ sets the ``__final__`` attribute to ``True`` on the decorated object
+ to allow runtime introspection.
+ """
+ try:
+ f.__final__ = True
+ except (AttributeError, TypeError):
+ # Skip the attribute silently if it is not writable.
+ # AttributeError happens if the object has __slots__ or a
+ # read-only property, TypeError if it's a builtin class.
+ pass
+ return f
+
+
+if hasattr(typing, "disjoint_base"): # 3.15
+ disjoint_base = typing.disjoint_base
+else:
+ def disjoint_base(cls):
+ """This decorator marks a class as a disjoint base.
+
+ Child classes of a disjoint base cannot inherit from other disjoint bases that are
+ not parent classes of the disjoint base.
+
+ For example:
+
+ @disjoint_base
+ class Disjoint1: pass
+
+ @disjoint_base
+ class Disjoint2: pass
+
+ class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error
+
+ Type checkers can use knowledge of disjoint bases to detect unreachable code
+ and determine when two types can overlap.
+
+ See PEP 800."""
+ cls.__disjoint_base__ = True
+ return cls
+
+
+def IntVar(name):
+ return typing.TypeVar(name)
+
+
+# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
+# Breakpoint: https://github.com/python/cpython/pull/29334
+if sys.version_info >= (3, 10, 1):
+ Literal = typing.Literal
+else:
+ def _flatten_literal_params(parameters):
+ """An internal helper for Literal creation: flatten Literals among parameters"""
+ params = []
+ for p in parameters:
+ if isinstance(p, _LiteralGenericAlias):
+ params.extend(p.__args__)
+ else:
+ params.append(p)
+ return tuple(params)
+
+ def _value_and_type_iter(params):
+ for p in params:
+ yield p, type(p)
+
+ class _LiteralGenericAlias(typing._GenericAlias, _root=True):
+ def __eq__(self, other):
+ if not isinstance(other, _LiteralGenericAlias):
+ return NotImplemented
+ these_args_deduped = set(_value_and_type_iter(self.__args__))
+ other_args_deduped = set(_value_and_type_iter(other.__args__))
+ return these_args_deduped == other_args_deduped
+
+ def __hash__(self):
+ return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+ class _LiteralForm(_ExtensionsSpecialForm, _root=True):
+ def __init__(self, doc: str):
+ self._name = 'Literal'
+ self._doc = self.__doc__ = doc
+
+ def __getitem__(self, parameters):
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+
+ parameters = _flatten_literal_params(parameters)
+
+ val_type_pairs = list(_value_and_type_iter(parameters))
+ try:
+ deduped_pairs = set(val_type_pairs)
+ except TypeError:
+ # unhashable parameters
+ pass
+ else:
+ # similar logic to typing._deduplicate on Python 3.9+
+ if len(deduped_pairs) < len(val_type_pairs):
+ new_parameters = []
+ for pair in val_type_pairs:
+ if pair in deduped_pairs:
+ new_parameters.append(pair[0])
+ deduped_pairs.remove(pair)
+ assert not deduped_pairs, deduped_pairs
+ parameters = tuple(new_parameters)
+
+ return _LiteralGenericAlias(self, parameters)
+
+ Literal = _LiteralForm(doc="""\
+ A type that can be used to indicate to type checkers
+ that the corresponding value has a value literally equivalent
+ to the provided parameter. For example:
+
+ var: Literal[4] = 4
+
+ The type checker understands that 'var' is literally equal to
+ the value 4 and no other value.
+
+ Literal[...] cannot be subclassed. There is no runtime
+ checking verifying that the parameter is actually a value
+ instead of a type.""")
+
+
+_overload_dummy = typing._overload_dummy
+
+
+if hasattr(typing, "get_overloads"): # 3.11+
+ overload = typing.overload
+ get_overloads = typing.get_overloads
+ clear_overloads = typing.clear_overloads
+else:
+ # {module: {qualname: {firstlineno: func}}}
+ _overload_registry = collections.defaultdict(
+ functools.partial(collections.defaultdict, dict)
+ )
+
+ def overload(func):
+ """Decorator for overloaded functions/methods.
+
+ In a stub file, place two or more stub definitions for the same
+ function in a row, each decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+
+ In a non-stub file (i.e. a regular .py file), do the same but
+ follow it with an implementation. The implementation should *not*
+ be decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+ def utf8(value):
+ # implementation goes here
+
+ The overloads for a function can be retrieved at runtime using the
+ get_overloads() function.
+ """
+ # classmethod and staticmethod
+ f = getattr(func, "__func__", func)
+ try:
+ _overload_registry[f.__module__][f.__qualname__][
+ f.__code__.co_firstlineno
+ ] = func
+ except AttributeError:
+ # Not a normal function; ignore.
+ pass
+ return _overload_dummy
+
+ def get_overloads(func):
+ """Return all defined overloads for *func* as a sequence."""
+ # classmethod and staticmethod
+ f = getattr(func, "__func__", func)
+ if f.__module__ not in _overload_registry:
+ return []
+ mod_dict = _overload_registry[f.__module__]
+ if f.__qualname__ not in mod_dict:
+ return []
+ return list(mod_dict[f.__qualname__].values())
+
+ def clear_overloads():
+ """Clear all overloads in the registry."""
+ _overload_registry.clear()
+
+
+# This is not a real generic class. Don't use outside annotations.
+Type = typing.Type
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+Deque = typing.Deque
+DefaultDict = typing.DefaultDict
+OrderedDict = typing.OrderedDict
+Counter = typing.Counter
+ChainMap = typing.ChainMap
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+# Breakpoint: https://github.com/python/cpython/pull/118681
+if sys.version_info >= (3, 13, 0, "beta"):
+ from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+ def _is_dunder(attr):
+ return attr.startswith('__') and attr.endswith('__')
+
+
+ class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True):
+ def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+ super().__init__(origin, nparams, inst=inst, name=name)
+ self._defaults = defaults
+
+ def __setattr__(self, attr, val):
+ allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+ if _is_dunder(attr) or attr in allowed_attrs:
+ object.__setattr__(self, attr, val)
+ else:
+ setattr(self.__origin__, attr, val)
+
+ @typing._tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ msg = "Parameters to generic types must be types."
+ params = tuple(typing._type_check(p, msg) for p in params)
+ if (
+ self._defaults
+ and len(params) < self._nparams
+ and len(params) + len(self._defaults) >= self._nparams
+ ):
+ params = (*params, *self._defaults[len(params) - self._nparams:])
+ actual_len = len(params)
+
+ if actual_len != self._nparams:
+ if self._defaults:
+ expected = f"at least {self._nparams - len(self._defaults)}"
+ else:
+ expected = str(self._nparams)
+ if not self._nparams:
+ raise TypeError(f"{self} is not a generic class")
+ raise TypeError(
+ f"Too {'many' if actual_len > self._nparams else 'few'}"
+ f" arguments for {self};"
+ f" actual {actual_len}, expected {expected}"
+ )
+ return self.copy_with(params)
+
+ _NoneType = type(None)
+ Generator = _SpecialGenericAlias(
+ collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+ )
+ AsyncGenerator = _SpecialGenericAlias(
+ collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+ )
+ ContextManager = _SpecialGenericAlias(
+ contextlib.AbstractContextManager,
+ 2,
+ name="ContextManager",
+ defaults=(typing.Optional[bool],)
+ )
+ AsyncContextManager = _SpecialGenericAlias(
+ contextlib.AbstractAsyncContextManager,
+ 2,
+ name="AsyncContextManager",
+ defaults=(typing.Optional[bool],)
+ )
+
+
+_PROTO_ALLOWLIST = {
+ 'collections.abc': [
+ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+ 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+ ],
+ 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+ 'typing_extensions': ['Buffer'],
+}
+
+
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+ "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+ "__final__",
+}
+
+
+def _get_protocol_attrs(cls):
+ attrs = set()
+ for base in cls.__mro__[:-1]: # without object
+ if base.__name__ in {'Protocol', 'Generic'}:
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in (*base.__dict__, *annotations):
+ if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
+ attrs.add(attr)
+ return attrs
+
+
+def _caller(depth=1, default='__main__'):
+ try:
+ return sys._getframemodulename(depth + 1) or default
+ except AttributeError: # For platforms without _getframemodulename()
+ pass
+ try:
+ return sys._getframe(depth + 1).f_globals.get('__name__', default)
+ except (AttributeError, ValueError): # For platforms without _getframe()
+ pass
+ return None
+
+
+# `__match_args__` attribute was removed from protocol members in 3.13,
+# we want to backport this change to older Python versions.
+# Breakpoint: https://github.com/python/cpython/pull/110683
+if sys.version_info >= (3, 13):
+ Protocol = typing.Protocol
+else:
+ def _allow_reckless_class_checks(depth=2):
+ """Allow instance and class checks for special stdlib modules.
+ The abc and functools modules indiscriminately call isinstance() and
+ issubclass() on the whole MRO of a user class, which may contain protocols.
+ """
+ return _caller(depth) in {'abc', 'functools', None}
+
+ def _no_init(self, *args, **kwargs):
+ if type(self)._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+
+ def _type_check_issubclass_arg_1(arg):
+ """Raise TypeError if `arg` is not an instance of `type`
+ in `issubclass(arg, <protocol>)`.
+
+ In most cases, this is verified by type.__subclasscheck__.
+ Checking it again unnecessarily would slow down issubclass() checks,
+ so, we don't perform this check unless we absolutely have to.
+
+ For various error paths, however,
+ we want to ensure that *this* error message is shown to the user
+ where relevant, rather than a typing.py-specific error message.
+ """
+ if not isinstance(arg, type):
+ # Same error message as for issubclass(1, int).
+ raise TypeError('issubclass() arg 1 must be a class')
+
+ # Inheriting from typing._ProtocolMeta isn't actually desirable,
+ # but is necessary to allow typing.Protocol and typing_extensions.Protocol
+ # to mix without getting TypeErrors about "metaclass conflict"
+ class _ProtocolMeta(type(typing.Protocol)):
+ # This metaclass is somewhat unfortunate,
+ # but is necessary for several reasons...
+ #
+ # NOTE: DO NOT call super() in any methods in this class
+ # That would call the methods on typing._ProtocolMeta on Python <=3.11
+ # and those are slow
+ def __new__(mcls, name, bases, namespace, **kwargs):
+ if name == "Protocol" and len(bases) < 2:
+ pass
+ elif {Protocol, typing.Protocol} & set(bases):
+ for base in bases:
+ if not (
+ base in {object, typing.Generic, Protocol, typing.Protocol}
+ or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
+ or is_protocol(base)
+ ):
+ raise TypeError(
+ f"Protocols can only inherit from other protocols, "
+ f"got {base!r}"
+ )
+ return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
+
+ def __init__(cls, *args, **kwargs):
+ abc.ABCMeta.__init__(cls, *args, **kwargs)
+ if getattr(cls, "_is_protocol", False):
+ cls.__protocol_attrs__ = _get_protocol_attrs(cls)
+
+ def __subclasscheck__(cls, other):
+ if cls is Protocol:
+ return type.__subclasscheck__(cls, other)
+ if (
+ getattr(cls, '_is_protocol', False)
+ and not _allow_reckless_class_checks()
+ ):
+ if not getattr(cls, '_is_runtime_protocol', False):
+ _type_check_issubclass_arg_1(other)
+ raise TypeError(
+ "Instance and class checks can only be used with "
+ "@runtime_checkable protocols"
+ )
+ if (
+ # this attribute is set by @runtime_checkable:
+ cls.__non_callable_proto_members__
+ and cls.__dict__.get("__subclasshook__") is _proto_hook
+ ):
+ _type_check_issubclass_arg_1(other)
+ non_method_attrs = sorted(cls.__non_callable_proto_members__)
+ raise TypeError(
+ "Protocols with non-method members don't support issubclass()."
+ f" Non-method members: {str(non_method_attrs)[1:-1]}."
+ )
+ return abc.ABCMeta.__subclasscheck__(cls, other)
+
+ def __instancecheck__(cls, instance):
+ # We need this method for situations where attributes are
+ # assigned in __init__.
+ if cls is Protocol:
+ return type.__instancecheck__(cls, instance)
+ if not getattr(cls, "_is_protocol", False):
+ # i.e., it's a concrete subclass of a protocol
+ return abc.ABCMeta.__instancecheck__(cls, instance)
+
+ if (
+ not getattr(cls, '_is_runtime_protocol', False) and
+ not _allow_reckless_class_checks()
+ ):
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime_checkable protocols")
+
+ if abc.ABCMeta.__instancecheck__(cls, instance):
+ return True
+
+ for attr in cls.__protocol_attrs__:
+ try:
+ val = inspect.getattr_static(instance, attr)
+ except AttributeError:
+ break
+ # this attribute is set by @runtime_checkable:
+ if val is None and attr not in cls.__non_callable_proto_members__:
+ break
+ else:
+ return True
+
+ return False
+
+ def __eq__(cls, other):
+ # Hack so that typing.Generic.__class_getitem__
+ # treats typing_extensions.Protocol
+ # as equivalent to typing.Protocol
+ if abc.ABCMeta.__eq__(cls, other) is True:
+ return True
+ return cls is Protocol and other is typing.Protocol
+
+ # This has to be defined, or the abc-module cache
+ # complains about classes with this metaclass being unhashable,
+ # if we define only __eq__!
+ def __hash__(cls) -> int:
+ return type.__hash__(cls)
+
+ @classmethod
+ def _proto_hook(cls, other):
+ if not cls.__dict__.get('_is_protocol', False):
+ return NotImplemented
+
+ for attr in cls.__protocol_attrs__:
+ for base in other.__mro__:
+ # Check if the members appears in the class dictionary...
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+
+ # ...or in annotations, if it is a sub-protocol.
+ annotations = getattr(base, '__annotations__', {})
+ if (
+ isinstance(annotations, collections.abc.Mapping)
+ and attr in annotations
+ and is_protocol(other)
+ ):
+ break
+ else:
+ return NotImplemented
+ return True
+
+ class Protocol(typing.Generic, metaclass=_ProtocolMeta):
+ __doc__ = typing.Protocol.__doc__
+ __slots__ = ()
+ _is_protocol = True
+ _is_runtime_protocol = False
+
+ def __init_subclass__(cls, *args, **kwargs):
+ super().__init_subclass__(*args, **kwargs)
+
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', False):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+ # Set (or override) the protocol subclass hook.
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ # Prohibit instantiation for protocol classes
+ if cls._is_protocol and cls.__init__ is Protocol.__init__:
+ cls.__init__ = _no_init
+
+
+# Breakpoint: https://github.com/python/cpython/pull/113401
+if sys.version_info >= (3, 13):
+ runtime_checkable = typing.runtime_checkable
+else:
+ def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol.
+
+ Such protocol can be used with isinstance() and issubclass().
+ Raise TypeError if applied to a non-protocol class.
+ This allows a simple-minded structural check very similar to
+ one trick ponies in collections.abc such as Iterable.
+
+ For example::
+
+ @runtime_checkable
+ class Closable(Protocol):
+ def close(self): ...
+
+ assert isinstance(open('/some/file'), Closable)
+
+ Warning: this will check only the presence of the required methods,
+ not their type signatures!
+ """
+ if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
+ raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+ f' got {cls!r}')
+ cls._is_runtime_protocol = True
+
+ # typing.Protocol classes on <=3.11 break if we execute this block,
+ # because typing.Protocol classes on <=3.11 don't have a
+ # `__protocol_attrs__` attribute, and this block relies on the
+ # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+ # break if we *don't* execute this block, because *they* assume that all
+ # protocol classes have a `__non_callable_proto_members__` attribute
+ # (which this block sets)
+ if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
+ # PEP 544 prohibits using issubclass()
+ # with protocols that have non-method members.
+ # See gh-113320 for why we compute this attribute here,
+ # rather than in `_ProtocolMeta.__init__`
+ cls.__non_callable_proto_members__ = set()
+ for attr in cls.__protocol_attrs__:
+ try:
+ is_callable = callable(getattr(cls, attr, None))
+ except Exception as e:
+ raise TypeError(
+ f"Failed to determine whether protocol member {attr!r} "
+ "is a method member"
+ ) from e
+ else:
+ if not is_callable:
+ cls.__non_callable_proto_members__.add(attr)
+
+ return cls
+
+
+# The "runtime" alias exists for backwards compatibility.
+runtime = runtime_checkable
+
+
+# Our version of runtime-checkable protocols is faster on Python <=3.11
+# Breakpoint: https://github.com/python/cpython/pull/112717
+if sys.version_info >= (3, 12):
+ SupportsInt = typing.SupportsInt
+ SupportsFloat = typing.SupportsFloat
+ SupportsComplex = typing.SupportsComplex
+ SupportsBytes = typing.SupportsBytes
+ SupportsIndex = typing.SupportsIndex
+ SupportsAbs = typing.SupportsAbs
+ SupportsRound = typing.SupportsRound
+else:
+ @runtime_checkable
+ class SupportsInt(Protocol):
+ """An ABC with one abstract method __int__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __int__(self) -> int:
+ pass
+
+ @runtime_checkable
+ class SupportsFloat(Protocol):
+ """An ABC with one abstract method __float__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __float__(self) -> float:
+ pass
+
+ @runtime_checkable
+ class SupportsComplex(Protocol):
+ """An ABC with one abstract method __complex__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __complex__(self) -> complex:
+ pass
+
+ @runtime_checkable
+ class SupportsBytes(Protocol):
+ """An ABC with one abstract method __bytes__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __bytes__(self) -> bytes:
+ pass
+
+ @runtime_checkable
+ class SupportsIndex(Protocol):
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __index__(self) -> int:
+ pass
+
+ @runtime_checkable
+ class SupportsAbs(Protocol[T_co]):
+ """
+ An ABC with one abstract method __abs__ that is covariant in its return type.
+ """
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __abs__(self) -> T_co:
+ pass
+
+ @runtime_checkable
+ class SupportsRound(Protocol[T_co]):
+ """
+ An ABC with one abstract method __round__ that is covariant in its return type.
+ """
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __round__(self, ndigits: int = 0) -> T_co:
+ pass
+
+
+if hasattr(io, "Reader") and hasattr(io, "Writer"):
+ Reader = io.Reader
+ Writer = io.Writer
+else:
+ @runtime_checkable
+ class Reader(Protocol[T_co]):
+ """Protocol for simple I/O reader instances.
+
+ This protocol only supports blocking I/O.
+ """
+
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def read(self, size: int = ..., /) -> T_co:
+ """Read data from the input stream and return it.
+
+ If *size* is specified, at most *size* items (bytes/characters) will be
+ read.
+ """
+
+ @runtime_checkable
+ class Writer(Protocol[T_contra]):
+ """Protocol for simple I/O writer instances.
+
+ This protocol only supports blocking I/O.
+ """
+
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def write(self, data: T_contra, /) -> int:
+ """Write *data* to the output stream and return the number of items written.""" # noqa: E501
+
+
+_NEEDS_SINGLETONMETA = (
+ not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems")
+)
+
+if _NEEDS_SINGLETONMETA:
+ class SingletonMeta(type):
+ def __setattr__(cls, attr, value):
+ # TypeError is consistent with the behavior of NoneType
+ raise TypeError(
+ f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+ )
+
+
+if hasattr(typing, "NoDefault"):
+ NoDefault = typing.NoDefault
+else:
+ class NoDefaultType(metaclass=SingletonMeta):
+ """The type of the NoDefault singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoDefault") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoDefault"
+
+ def __reduce__(self):
+ return "NoDefault"
+
+ NoDefault = NoDefaultType()
+ del NoDefaultType
+
+if hasattr(typing, "NoExtraItems"):
+ NoExtraItems = typing.NoExtraItems
+else:
+ class NoExtraItemsType(metaclass=SingletonMeta):
+ """The type of the NoExtraItems singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoExtraItems") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoExtraItems"
+
+ def __reduce__(self):
+ return "NoExtraItems"
+
+ NoExtraItems = NoExtraItemsType()
+ del NoExtraItemsType
+
+if _NEEDS_SINGLETONMETA:
+ del SingletonMeta
+
+
+# Update this to something like >=3.13.0b1 if and when
+# PEP 728 is implemented in CPython
+_PEP_728_IMPLEMENTED = False
+
+if _PEP_728_IMPLEMENTED:
+ # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
+ # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
+ # The standard library TypedDict below Python 3.11 does not store runtime
+ # information about optional and required keys when using Required or NotRequired.
+ # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
+ # Aaaand on 3.12 we add __orig_bases__ to TypedDict
+ # to enable better runtime introspection.
+ # On 3.13 we deprecate some odd ways of creating TypedDicts.
+ # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
+ # PEP 728 (still pending) makes more changes.
+ TypedDict = typing.TypedDict
+ _TypedDictMeta = typing._TypedDictMeta
+ is_typeddict = typing.is_typeddict
+else:
+ # 3.10.0 and later
+ _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
+ def _get_typeddict_qualifiers(annotation_type):
+ while True:
+ annotation_origin = get_origin(annotation_type)
+ if annotation_origin is Annotated:
+ annotation_args = get_args(annotation_type)
+ if annotation_args:
+ annotation_type = annotation_args[0]
+ else:
+ break
+ elif annotation_origin is Required:
+ yield Required
+ annotation_type, = get_args(annotation_type)
+ elif annotation_origin is NotRequired:
+ yield NotRequired
+ annotation_type, = get_args(annotation_type)
+ elif annotation_origin is ReadOnly:
+ yield ReadOnly
+ annotation_type, = get_args(annotation_type)
+ else:
+ break
+
    # Runtime backport of typing.TypedDict's metaclass, covering PEP 589
    # (TypedDict), PEP 655 (Required/NotRequired), PEP 705 (ReadOnly) and
    # PEP 728 (closed= / extra_items=).
    class _TypedDictMeta(type):

        def __new__(cls, name, bases, ns, *, total=True, closed=None,
                    extra_items=NoExtraItems):
            """Create new typed dict class object.

            This method is called when TypedDict is subclassed,
            or when TypedDict is instantiated. This way
            TypedDict supports all three syntax forms described in its docstring.
            Subclasses and instances of TypedDict return actual dictionaries.
            """
            for base in bases:
                # Only other TypedDicts (plus Generic, for generic TypedDicts)
                # may appear among the bases.
                if type(base) is not _TypedDictMeta and base is not typing.Generic:
                    raise TypeError('cannot inherit from both a TypedDict type '
                                    'and a non-TypedDict base class')
            # closed=... (earlier PEP 728 draft) and extra_items=... (final
            # PEP 728) are mutually exclusive spellings of the same feature.
            if closed is not None and extra_items is not NoExtraItems:
                raise TypeError(f"Cannot combine closed={closed!r} and extra_items")

            if any(issubclass(b, typing.Generic) for b in bases):
                generic_base = (typing.Generic,)
            else:
                generic_base = ()

            ns_annotations = ns.pop('__annotations__', None)

            # typing.py generally doesn't let you inherit from plain Generic, unless
            # the name of the class happens to be "Protocol"
            tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
            tp_dict.__name__ = name
            if tp_dict.__qualname__ == "Protocol":
                tp_dict.__qualname__ = name

            if not hasattr(tp_dict, '__orig_bases__'):
                tp_dict.__orig_bases__ = bases

            annotations = {}
            own_annotate = None
            if ns_annotations is not None:
                own_annotations = ns_annotations
            elif sys.version_info >= (3, 14):
                # On 3.14+ annotations may only exist lazily through an
                # __annotate__ function (PEP 649/749).
                if hasattr(annotationlib, "get_annotate_from_class_namespace"):
                    own_annotate = annotationlib.get_annotate_from_class_namespace(ns)
                else:
                    # 3.14.0a7 and earlier
                    own_annotate = ns.get("__annotate__")
                if own_annotate is not None:
                    own_annotations = annotationlib.call_annotate_function(
                        own_annotate, Format.FORWARDREF, owner=tp_dict
                    )
                else:
                    own_annotations = {}
            else:
                own_annotations = {}
            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
            # _TAKES_MODULE: whether this interpreter's typing._type_check
            # accepts a module= keyword.
            if _TAKES_MODULE:
                own_checked_annotations = {
                    n: typing._type_check(tp, msg, module=tp_dict.__module__)
                    for n, tp in own_annotations.items()
                }
            else:
                own_checked_annotations = {
                    n: typing._type_check(tp, msg)
                    for n, tp in own_annotations.items()
                }
            required_keys = set()
            optional_keys = set()
            readonly_keys = set()
            mutable_keys = set()
            extra_items_type = extra_items

            # Merge introspection data inherited from TypedDict bases.
            for base in bases:
                base_dict = base.__dict__

                if sys.version_info <= (3, 14):
                    annotations.update(base_dict.get('__annotations__', {}))
                required_keys.update(base_dict.get('__required_keys__', ()))
                optional_keys.update(base_dict.get('__optional_keys__', ()))
                readonly_keys.update(base_dict.get('__readonly_keys__', ()))
                mutable_keys.update(base_dict.get('__mutable_keys__', ()))

            # This was specified in an earlier version of PEP 728. Support
            # is retained for backwards compatibility, but only for Python
            # 3.13 and lower.
            if (closed and sys.version_info < (3, 14)
                    and "__extra_items__" in own_checked_annotations):
                annotation_type = own_checked_annotations.pop("__extra_items__")
                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
                if Required in qualifiers:
                    raise TypeError(
                        "Special key __extra_items__ does not support "
                        "Required"
                    )
                if NotRequired in qualifiers:
                    raise TypeError(
                        "Special key __extra_items__ does not support "
                        "NotRequired"
                    )
                extra_items_type = annotation_type

            annotations.update(own_checked_annotations)
            # Classify each newly declared key as required/optional and
            # read-only/mutable; explicit qualifiers override `total`, and
            # re-declaring an inherited key may flip its mutability.
            for annotation_key, annotation_type in own_checked_annotations.items():
                qualifiers = set(_get_typeddict_qualifiers(annotation_type))

                if Required in qualifiers:
                    required_keys.add(annotation_key)
                elif NotRequired in qualifiers:
                    optional_keys.add(annotation_key)
                elif total:
                    required_keys.add(annotation_key)
                else:
                    optional_keys.add(annotation_key)
                if ReadOnly in qualifiers:
                    mutable_keys.discard(annotation_key)
                    readonly_keys.add(annotation_key)
                else:
                    mutable_keys.add(annotation_key)
                    readonly_keys.discard(annotation_key)

            # Breakpoint: https://github.com/python/cpython/pull/119891
            if sys.version_info >= (3, 14):
                # Expose a lazy __annotate__ (PEP 649) that merges base and
                # own annotations in the requested Format.
                def __annotate__(format):
                    annos = {}
                    for base in bases:
                        if base is Generic:
                            continue
                        base_annotate = base.__annotate__
                        if base_annotate is None:
                            continue
                        base_annos = annotationlib.call_annotate_function(
                            base_annotate, format, owner=base)
                        annos.update(base_annos)
                    if own_annotate is not None:
                        own = annotationlib.call_annotate_function(
                            own_annotate, format, owner=tp_dict)
                        if format != Format.STRING:
                            own = {
                                n: typing._type_check(tp, msg, module=tp_dict.__module__)
                                for n, tp in own.items()
                            }
                    elif format == Format.STRING:
                        own = annotationlib.annotations_to_string(own_annotations)
                    elif format in (Format.FORWARDREF, Format.VALUE):
                        own = own_checked_annotations
                    else:
                        raise NotImplementedError(format)
                    annos.update(own)
                    return annos

                tp_dict.__annotate__ = __annotate__
            else:
                tp_dict.__annotations__ = annotations
            tp_dict.__required_keys__ = frozenset(required_keys)
            tp_dict.__optional_keys__ = frozenset(optional_keys)
            tp_dict.__readonly_keys__ = frozenset(readonly_keys)
            tp_dict.__mutable_keys__ = frozenset(mutable_keys)
            tp_dict.__total__ = total
            tp_dict.__closed__ = closed
            tp_dict.__extra_items__ = extra_items_type
            return tp_dict

        __call__ = dict  # static method

        def __subclasscheck__(cls, other):
            # Typed dicts are only for static structural subtyping.
            raise TypeError('TypedDict does not support instance and class checks')

        __instancecheck__ = __subclasscheck__

    # The class that __mro_entries__ substitutes when TypedDict is subclassed.
    _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+
    def _create_typeddict(
        typename,
        fields,
        /,
        *,
        typing_is_inline,
        total,
        closed,
        extra_items,
        **kwargs,
    ):
        """Implement the functional and inline TypedDict creation syntaxes.

        Handles the deprecated zero-field forms (missing ``fields`` or
        ``fields=None``), fields that happen to be named ``closed`` or
        ``extra_items``, and the deprecated keyword-argument field syntax,
        then defers to _TypedDictMeta to build the class.
        """
        if fields is _marker or fields is None:
            if fields is _marker:
                deprecated_thing = (
                    "Failing to pass a value for the 'fields' parameter"
                )
            else:
                deprecated_thing = "Passing `None` as the 'fields' parameter"

            example = f"`{typename} = TypedDict({typename!r}, {{}})`"
            deprecation_msg = (
                f"{deprecated_thing} is deprecated and will be disallowed in "
                "Python 3.15. To create a TypedDict class with 0 fields "
                "using the functional syntax, pass an empty dictionary, e.g. "
            ) + example + "."
            warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
            # Support a field called "closed"
            if closed is not False and closed is not True and closed is not None:
                kwargs["closed"] = closed
                closed = None
            # Or "extra_items"
            if extra_items is not NoExtraItems:
                kwargs["extra_items"] = extra_items
                extra_items = NoExtraItems
            fields = kwargs
        elif kwargs:
            raise TypeError("TypedDict takes either a dict or keyword arguments,"
                            " but not both")
        if kwargs:
            # Breakpoint: https://github.com/python/cpython/pull/104891
            if sys.version_info >= (3, 13):
                raise TypeError("TypedDict takes no keyword arguments")
            warnings.warn(
                "The kwargs-based syntax for TypedDict definitions is deprecated "
                "in Python 3.11, will be removed in Python 3.13, and may not be "
                "understood by third-party type checkers.",
                DeprecationWarning,
                stacklevel=2,
            )

        ns = {'__annotations__': dict(fields)}
        # depth=4 for the inline form because of the extra special-form frames.
        module = _caller(depth=4 if typing_is_inline else 2)
        if module is not None:
            # Setting correct module is necessary to make typed dict classes
            # pickleable.
            ns['__module__'] = module

        td = _TypedDictMeta(typename, (), ns, total=total, closed=closed,
                            extra_items=extra_items)
        td.__orig_bases__ = (TypedDict,)
        return td
+
    # The special form bound to the name `TypedDict`: calling it runs the
    # functional syntax, and using it as a base class works because
    # __mro_entries__ swaps in the real _TypedDict class.
    class _TypedDictSpecialForm(_SpecialForm, _root=True):
        def __call__(
            self,
            typename,
            fields=_marker,
            /,
            *,
            total=True,
            closed=None,
            extra_items=NoExtraItems,
            **kwargs
        ):
            # Functional syntax: TypedDict('Name', {'x': int}, ...).
            return _create_typeddict(
                typename,
                fields,
                typing_is_inline=False,
                total=total,
                closed=closed,
                extra_items=extra_items,
                **kwargs,
            )

        def __mro_entries__(self, bases):
            # Invoked when TypedDict appears among a class's bases.
            return (_TypedDict,)
+
    @_TypedDictSpecialForm
    def TypedDict(self, args):
        """A simple typed namespace. At runtime it is equivalent to a plain dict.

        TypedDict creates a dictionary type such that a type checker will expect all
        instances to have a certain set of keys, where each key is
        associated with a value of a consistent type. This expectation
        is not checked at runtime.

        Usage::

            class Point2D(TypedDict):
                x: int
                y: int
                label: str

            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check

            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')

        The type info can be accessed via the Point2D.__annotations__ dict, and
        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
        TypedDict supports an additional equivalent form::

            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})

        By default, all keys must be present in a TypedDict. It is possible
        to override this by specifying totality::

            class Point2D(TypedDict, total=False):
                x: int
                y: int

        This means that a Point2D TypedDict can have any of the keys omitted. A type
        checker is only expected to support a literal False or True as the value of
        the total argument. True is the default, and makes all items defined in the
        class body be required.

        The Required and NotRequired special forms can also be used to mark
        individual keys as being required or not required::

            class Point2D(TypedDict):
                x: int                 # the "x" key must always be present (Required is the default)
                y: NotRequired[int]    # the "y" key can be omitted

        See PEP 655 for more details on Required and NotRequired.

        Subscripting with a dict (``TypedDict[{'x': int}]``) creates an
        anonymous, closed TypedDict.
        """
        # This runs when creating inline TypedDicts:
        if not isinstance(args, dict):
            raise TypeError(
                "TypedDict[...] should be used with a single dict argument"
            )

        return _create_typeddict(
            "<inline TypedDict>",
            args,
            typing_is_inline=True,
            total=True,
            closed=True,
            extra_items=NoExtraItems,
        )
+
    # Accept TypedDict classes built by either the stdlib or this backport.
    _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)

    def is_typeddict(tp):
        """Check if an annotation is a TypedDict class

        For example::
            class Film(TypedDict):
                title: str
                year: int

            is_typeddict(Film)              # => True
            is_typeddict(Union[list, str])  # => False
        """
        return isinstance(tp, _TYPEDDICT_TYPES)
+
+
# Re-export typing.assert_type where it exists (3.11+); otherwise supply the
# trivial runtime backport, which simply hands the value back.
if hasattr(typing, "assert_type"):
    assert_type = typing.assert_type
else:
    def assert_type(val, typ, /):
        """Ask a static type checker to confirm that *val* has type *typ*.

        For example::

            def greet(name: str) -> None:
                assert_type(name, str)  # ok
                assert_type(name, int)  # type checker error

        No checking happens at runtime: the first argument is returned
        unchanged and the second is ignored.
        """
        return val
+
+
# typing.ReadOnly first appeared in 3.13, which also handles stripping
# Required/NotRequired/ReadOnly in get_type_hints natively.
if hasattr(typing, "ReadOnly"):  # 3.13+
    get_type_hints = typing.get_type_hints
else:  # <=3.12
    # replaces _strip_annotations()
    def _strip_extras(t):
        """Strips Annotated, Required and NotRequired from a given type."""
        if isinstance(t, typing._AnnotatedAlias):
            return _strip_extras(t.__origin__)
        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
            return _strip_extras(t.__args__[0])
        if isinstance(t, typing._GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return t.copy_with(stripped_args)
        # types.GenericAlias, e.g. list[int] (3.9+)
        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return _types.GenericAlias(t.__origin__, stripped_args)
        # types.UnionType, e.g. int | str (3.10+)
        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return functools.reduce(operator.or_, stripped_args)

        return t

    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
        """Return type hints for an object.

        This is often the same as obj.__annotations__, but it handles
        forward references encoded as string literals, adds Optional[t] if a
        default value equal to None is set and recursively replaces all
        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
        (unless 'include_extras=True').

        The argument may be a module, class, method, or function. The annotations
        are returned as a dictionary. For classes, annotations include also
        inherited members.

        TypeError is raised if the argument is not of a type that can contain
        annotations, and an empty dictionary is returned if no annotations are
        present.

        BEWARE -- the behavior of globalns and localns is counterintuitive
        (unless you are familiar with how eval() and exec() work). The
        search order is locals first, then globals.

        - If no dict arguments are passed, an attempt is made to use the
          globals from obj (or the respective module's globals for classes),
          and these are also used as the locals. If the object does not appear
          to have globals, an empty dictionary is used.

        - If one dict argument is passed, it is used for both globals and
          locals.

        - If two dict arguments are passed, they specify globals and
          locals, respectively.
        """
        hint = typing.get_type_hints(
            obj, globalns=globalns, localns=localns, include_extras=True
        )
        # Breakpoint: https://github.com/python/cpython/pull/30304
        if sys.version_info < (3, 11):
            _clean_optional(obj, hint, globalns, localns)
        if include_extras:
            return hint
        return {k: _strip_extras(t) for k, t in hint.items()}

    _NoneType = type(None)

    def _could_be_inserted_optional(t):
        """detects Union[..., None] pattern"""
        if not isinstance(t, typing._UnionGenericAlias):
            return False
        # Assume if last argument is not None they are user defined
        if t.__args__[-1] is not _NoneType:
            return False
        return True

    # < 3.11
    def _clean_optional(obj, hints, globalns=None, localns=None):
        # reverts injected Union[..., None] cases from typing.get_type_hints
        # when a None default value is used.
        # see https://github.com/python/typing_extensions/issues/310
        if not hints or isinstance(obj, type):
            return
        defaults = typing._get_defaults(obj)  # avoid accessing __annotations__
        if not defaults:
            return
        original_hints = obj.__annotations__
        for name, value in hints.items():
            # Not a Union[..., None] or replacement conditions not fulfilled
            if (not _could_be_inserted_optional(value)
                or name not in defaults
                or defaults[name] is not None
            ):
                continue
            original_value = original_hints[name]
            # value=NoneType should have caused a skip above but check for safety
            if original_value is None:
                original_value = _NoneType
            # Forward reference
            if isinstance(original_value, str):
                if globalns is None:
                    if isinstance(obj, _types.ModuleType):
                        globalns = obj.__dict__
                    else:
                        nsobj = obj
                        # Find globalns for the unwrapped object.
                        while hasattr(nsobj, '__wrapped__'):
                            nsobj = nsobj.__wrapped__
                        globalns = getattr(nsobj, '__globals__', {})
                    if localns is None:
                        localns = globalns
                elif localns is None:
                    localns = globalns

                original_value = ForwardRef(
                    original_value,
                    is_argument=not isinstance(obj, _types.ModuleType)
                )
            original_evaluated = typing._eval_type(original_value, globalns, localns)
            # Compare if values differ. Note that even if equal
            # value might be cached by typing._tp_cache contrary to original_evaluated
            if original_evaluated != value or (
                # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias
                hasattr(_types, "UnionType")
                and isinstance(original_evaluated, _types.UnionType)
                and not isinstance(value, _types.UnionType)
            ):
                hints[name] = original_evaluated
+
# Python 3.9 has get_origin() and get_args() but those implementations don't support
# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
# Breakpoint: https://github.com/python/cpython/pull/25298
if sys.version_info >= (3, 10):
    get_origin = typing.get_origin
    get_args = typing.get_args
# 3.9
else:
    def get_origin(tp):
        """Get the unsubscripted version of a type.

        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
        and Annotated. Return None for unsupported types. Examples::

            get_origin(Literal[42]) is Literal
            get_origin(int) is None
            get_origin(ClassVar[int]) is ClassVar
            get_origin(Generic) is Generic
            get_origin(Generic[T]) is Generic
            get_origin(Union[T, int]) is Union
            get_origin(List[Tuple[T, T]][int]) == list
            get_origin(P.args) is P
        """
        if isinstance(tp, typing._AnnotatedAlias):
            return Annotated
        if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias,
                           ParamSpecArgs, ParamSpecKwargs)):
            return tp.__origin__
        if tp is typing.Generic:
            return typing.Generic
        return None

    def get_args(tp):
        """Get type arguments with all substitutions performed.

        For unions, basic simplifications used by Union constructor are performed.
        Examples::
            get_args(Dict[str, int]) == (str, int)
            get_args(int) == ()
            get_args(Union[int, Union[T, int], str][int]) == (int, str)
            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
            get_args(Callable[[], T][int]) == ([], int)
        """
        if isinstance(tp, typing._AnnotatedAlias):
            return (tp.__origin__, *tp.__metadata__)
        if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)):
            res = tp.__args__
            # Callable args are reported as ([params...], ret) unless
            # the params are Ellipsis.
            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
                res = (list(res[:-1]), res[-1])
            return res
        return ()
+
+
+# 3.10+
+if hasattr(typing, 'TypeAlias'):
+ TypeAlias = typing.TypeAlias
+# 3.9
+else:
+ @_ExtensionsSpecialForm
+ def TypeAlias(self, parameters):
+ """Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example above.
+ """
+ raise TypeError(f"{self} is not subscriptable")
+
+
def _set_default(type_param, default):
    # Attach PEP 696 default-value introspection (__default__ plus a
    # zero-argument has_default() callable) to a TypeVar-like object.
    def _has_default():
        return default is not NoDefault

    type_param.has_default = _has_default
    type_param.__default__ = default
+
+
def _set_module(typevarlike):
    # For pickling: record the calling module on the object, unless the
    # caller is typing_extensions itself.
    caller_module = _caller(depth=2)
    if caller_module != 'typing_extensions':
        typevarlike.__module__ = caller_module
+
+
class _DefaultMixin:
    """Mixin for TypeVarLike defaults."""

    __slots__ = ()
    # Reuse _set_default directly as the initializer, so that
    # _DefaultMixin.__init__(self, default) wires up has_default()/__default__.
    __init__ = _set_default
+
+
# Classes using this metaclass must provide a _backported_typevarlike ClassVar
class _TypeVarLikeMeta(type):
    # isinstance() against the backported class accepts objects created by
    # the real typing implementation it wraps.
    def __instancecheck__(cls, __instance: Any) -> bool:
        return isinstance(__instance, cls._backported_typevarlike)
+
+
if _PEP_696_IMPLEMENTED:
    from typing import TypeVar
else:
    # Add default and infer_variance parameters from PEP 696 and 695
    class TypeVar(metaclass=_TypeVarLikeMeta):
        """Type variable.

        Backport of typing.TypeVar adding the PEP 696 ``default=`` and
        PEP 695 ``infer_variance=`` parameters; __new__ returns a real
        typing.TypeVar augmented in place.
        """

        _backported_typevarlike = typing.TypeVar

        def __new__(cls, name, *constraints, bound=None,
                    covariant=False, contravariant=False,
                    default=NoDefault, infer_variance=False):
            if hasattr(typing, "TypeAliasType"):
                # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
                typevar = typing.TypeVar(name, *constraints, bound=bound,
                                         covariant=covariant, contravariant=contravariant,
                                         infer_variance=infer_variance)
            else:
                typevar = typing.TypeVar(name, *constraints, bound=bound,
                                         covariant=covariant, contravariant=contravariant)
                if infer_variance and (covariant or contravariant):
                    raise ValueError("Variance cannot be specified with infer_variance.")
                typevar.__infer_variance__ = infer_variance

            _set_default(typevar, default)
            _set_module(typevar)

            # PEP 696: substitute the default when this TypeVar is the first
            # parameter left unfilled during subscription.
            def _tvar_prepare_subst(alias, args):
                if (
                    typevar.has_default()
                    and alias.__parameters__.index(typevar) == len(args)
                ):
                    args += (typevar.__default__,)
                return args

            typevar.__typing_prepare_subst__ = _tvar_prepare_subst
            return typevar

        def __init_subclass__(cls) -> None:
            raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+
+
# Python 3.10+ has PEP 612
if hasattr(typing, 'ParamSpecArgs'):
    ParamSpecArgs = typing.ParamSpecArgs
    ParamSpecKwargs = typing.ParamSpecKwargs
# 3.9
else:
    class _Immutable:
        """Mixin to indicate that object should not be copied."""
        __slots__ = ()

        def __copy__(self):
            return self

        def __deepcopy__(self, memo):
            return self

    class ParamSpecArgs(_Immutable):
        """The args for a ParamSpec object.

        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.

        ParamSpecArgs objects have a reference back to their ParamSpec:

            P.args.__origin__ is P

        This type is meant for runtime introspection and has no special meaning to
        static type checkers.
        """
        def __init__(self, origin):
            self.__origin__ = origin

        def __repr__(self):
            return f"{self.__origin__.__name__}.args"

        def __eq__(self, other):
            if not isinstance(other, ParamSpecArgs):
                return NotImplemented
            return self.__origin__ == other.__origin__

    class ParamSpecKwargs(_Immutable):
        """The kwargs for a ParamSpec object.

        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.

        ParamSpecKwargs objects have a reference back to their ParamSpec:

            P.kwargs.__origin__ is P

        This type is meant for runtime introspection and has no special meaning to
        static type checkers.
        """
        def __init__(self, origin):
            self.__origin__ = origin

        def __repr__(self):
            return f"{self.__origin__.__name__}.kwargs"

        def __eq__(self, other):
            if not isinstance(other, ParamSpecKwargs):
                return NotImplemented
            return self.__origin__ == other.__origin__
+
+
if _PEP_696_IMPLEMENTED:
    from typing import ParamSpec

# 3.10+
elif hasattr(typing, 'ParamSpec'):

    # Add default parameter - PEP 696
    class ParamSpec(metaclass=_TypeVarLikeMeta):
        """Parameter specification."""

        _backported_typevarlike = typing.ParamSpec

        def __new__(cls, name, *, bound=None,
                    covariant=False, contravariant=False,
                    infer_variance=False, default=NoDefault):
            if hasattr(typing, "TypeAliasType"):
                # PEP 695 implemented, can pass infer_variance to typing.TypeVar
                paramspec = typing.ParamSpec(name, bound=bound,
                                             covariant=covariant,
                                             contravariant=contravariant,
                                             infer_variance=infer_variance)
            else:
                paramspec = typing.ParamSpec(name, bound=bound,
                                             covariant=covariant,
                                             contravariant=contravariant)
                paramspec.__infer_variance__ = infer_variance

            _set_default(paramspec, default)
            _set_module(paramspec)

            # PEP 696: fill in the default during subscription when this
            # ParamSpec is the first parameter left without an argument.
            def _paramspec_prepare_subst(alias, args):
                params = alias.__parameters__
                i = params.index(paramspec)
                if i == len(args) and paramspec.has_default():
                    args = [*args, paramspec.__default__]
                if i >= len(args):
                    raise TypeError(f"Too few arguments for {alias}")
                # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
                if len(params) == 1 and not typing._is_param_expr(args[0]):
                    assert i == 0
                    args = (args,)
                # Convert lists to tuples to help other libraries cache the results.
                elif isinstance(args[i], list):
                    args = (*args[:i], tuple(args[i]), *args[i + 1:])
                return args

            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
            return paramspec

        def __init_subclass__(cls) -> None:
            raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")

# 3.9
else:

    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
    class ParamSpec(list, _DefaultMixin):
        """Parameter specification variable.

        Usage::

           P = ParamSpec('P')

        Parameter specification variables exist primarily for the benefit of static
        type checkers.  They are used to forward the parameter types of one
        callable to another callable, a pattern commonly found in higher order
        functions and decorators.  They are only valid when used in ``Concatenate``,
        or as the first argument to ``Callable``. In Python 3.10 and higher,
        they are also supported in user-defined Generics at runtime.
        See class Generic for more information on generic types.  An
        example for annotating a decorator::

           T = TypeVar('T')
           P = ParamSpec('P')

           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
               '''A type-safe decorator to add logging to a function.'''
               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
                   logging.info(f'{f.__name__} was called')
                   return f(*args, **kwargs)
               return inner

           @add_logging
           def add_two(x: float, y: float) -> float:
               '''Add two numbers together.'''
               return x + y

        Parameter specification variables defined with covariant=True or
        contravariant=True can be used to declare covariant or contravariant
        generic types.  These keyword arguments are valid, but their actual semantics
        are yet to be decided.  See PEP 612 for details.

        Parameter specification variables can be introspected. e.g.:

           P.__name__ == 'P'
           P.__bound__ == None
           P.__covariant__ == False
           P.__contravariant__ == False

        Note that only parameter specification variables defined in global scope can
        be pickled.
        """

        # Trick Generic __parameters__.
        __class__ = typing.TypeVar

        @property
        def args(self):
            return ParamSpecArgs(self)

        @property
        def kwargs(self):
            return ParamSpecKwargs(self)

        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
                     infer_variance=False, default=NoDefault):
            # The single list element is the ParamSpec itself (see the
            # inherits-from-list workaround above).
            list.__init__(self, [self])
            self.__name__ = name
            self.__covariant__ = bool(covariant)
            self.__contravariant__ = bool(contravariant)
            self.__infer_variance__ = bool(infer_variance)
            if bound:
                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
            else:
                self.__bound__ = None
            _DefaultMixin.__init__(self, default)

            # for pickling:
            def_mod = _caller()
            if def_mod != 'typing_extensions':
                self.__module__ = def_mod

        def __repr__(self):
            if self.__infer_variance__:
                prefix = ''
            elif self.__covariant__:
                prefix = '+'
            elif self.__contravariant__:
                prefix = '-'
            else:
                prefix = '~'
            return prefix + self.__name__

        def __hash__(self):
            return object.__hash__(self)

        def __eq__(self, other):
            return self is other

        def __reduce__(self):
            return self.__name__

        # Hack to get typing._type_check to pass.
        def __call__(self, *args, **kwargs):
            pass
+
+
# 3.9
if not hasattr(typing, 'Concatenate'):
    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.

    # 3.9.0-1
    if not hasattr(typing, '_type_convert'):
        def _type_convert(arg, module=None, *, allow_special_forms=False):
            """For converting None to type(None), and strings to ForwardRef."""
            if arg is None:
                return type(None)
            if isinstance(arg, str):
                # ForwardRef grew module= in 3.9.7 and is_class= in 3.9.8.
                if sys.version_info <= (3, 9, 6):
                    return ForwardRef(arg)
                if sys.version_info <= (3, 9, 7):
                    return ForwardRef(arg, module=module)
                return ForwardRef(arg, module=module, is_class=allow_special_forms)
            return arg
    else:
        _type_convert = typing._type_convert

    class _ConcatenateGenericAlias(list):
        # Runtime representation of Concatenate[...] for 3.9, where typing
        # has no _ConcatenateGenericAlias of its own.

        # Trick Generic into looking into this for __parameters__.
        __class__ = typing._GenericAlias

        def __init__(self, origin, args):
            super().__init__(args)
            self.__origin__ = origin
            self.__args__ = args

        def __repr__(self):
            _type_repr = typing._type_repr
            return (f'{_type_repr(self.__origin__)}'
                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')

        def __hash__(self):
            return hash((self.__origin__, self.__args__))

        # Hack to get typing._type_check to pass in Generic.
        def __call__(self, *args, **kwargs):
            pass

        @property
        def __parameters__(self):
            return tuple(
                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
            )

        # 3.9 used by __getitem__ below
        def copy_with(self, params):
            if isinstance(params[-1], _ConcatenateGenericAlias):
                params = (*params[:-1], *params[-1].__args__)
            elif isinstance(params[-1], (list, tuple)):
                return (*params[:-1], *params[-1])
            elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))):
                raise TypeError("The last parameter to Concatenate should be a "
                                "ParamSpec variable or ellipsis.")
            return self.__class__(self.__origin__, params)

        # 3.9; accessed during GenericAlias.__getitem__ when substituting
        def __getitem__(self, args):
            if self.__origin__ in (Generic, Protocol):
                # Can't subscript Generic[...] or Protocol[...].
                raise TypeError(f"Cannot subscript already-subscripted {self}")
            if not self.__parameters__:
                raise TypeError(f"{self} is not a generic class")

            if not isinstance(args, tuple):
                args = (args,)
            args = _unpack_args(*(_type_convert(p) for p in args))
            params = self.__parameters__
            # Let each parameter massage the argument list (defaults,
            # list->tuple conversion, PEP 612 special cases).
            for param in params:
                prepare = getattr(param, "__typing_prepare_subst__", None)
                if prepare is not None:
                    args = prepare(self, args)
                # 3.9 & typing.ParamSpec
                elif isinstance(param, ParamSpec):
                    i = params.index(param)
                    if (
                        i == len(args)
                        and getattr(param, '__default__', NoDefault) is not NoDefault
                    ):
                        args = [*args, param.__default__]
                    if i >= len(args):
                        raise TypeError(f"Too few arguments for {self}")
                    # Special case for Z[[int, str, bool]] == Z[int, str, bool]
                    if len(params) == 1 and not _is_param_expr(args[0]):
                        assert i == 0
                        args = (args,)
                    elif (
                        isinstance(args[i], list)
                        # 3.9
                        # This class inherits from list do not convert
                        and not isinstance(args[i], _ConcatenateGenericAlias)
                    ):
                        args = (*args[:i], tuple(args[i]), *args[i + 1:])

            alen = len(args)
            plen = len(params)
            if alen != plen:
                raise TypeError(
                    f"Too {'many' if alen > plen else 'few'} arguments for {self};"
                    f" actual {alen}, expected {plen}"
                )

            subst = dict(zip(self.__parameters__, args))
            # determine new args
            new_args = []
            for arg in self.__args__:
                if isinstance(arg, type):
                    new_args.append(arg)
                    continue
                if isinstance(arg, TypeVar):
                    arg = subst[arg]
                    # Unpacked arguments are not valid as substitutions here.
                    if (
                        (isinstance(arg, typing._GenericAlias) and _is_unpack(arg))
                        or (
                            hasattr(_types, "GenericAlias")
                            and isinstance(arg, _types.GenericAlias)
                            and getattr(arg, "__unpacked__", False)
                        )
                    ):
                        raise TypeError(f"{arg} is not valid as type argument")

                elif isinstance(arg,
                                typing._GenericAlias
                                if not hasattr(_types, "GenericAlias") else
                                (typing._GenericAlias, _types.GenericAlias)
                                ):
                    # Substitute recursively into nested generic aliases.
                    subparams = arg.__parameters__
                    if subparams:
                        subargs = tuple(subst[x] for x in subparams)
                        arg = arg[subargs]
                new_args.append(arg)
            return self.copy_with(tuple(new_args))

# 3.10+
else:
    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias

    # 3.10
    if sys.version_info < (3, 11):

        class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True):
            # needed for checks in collections.abc.Callable to accept this class
            __module__ = "typing"

            def copy_with(self, params):
                if isinstance(params[-1], (list, tuple)):
                    return (*params[:-1], *params[-1])
                if isinstance(params[-1], typing._ConcatenateGenericAlias):
                    params = (*params[:-1], *params[-1].__args__)
                elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)):
                    raise TypeError("The last parameter to Concatenate should be a "
                                    "ParamSpec variable or ellipsis.")
                return super(typing._ConcatenateGenericAlias, self).copy_with(params)

            def __getitem__(self, args):
                value = super().__getitem__(args)
                # Flatten any Unpack[...] produced by the substitution.
                if isinstance(value, tuple) and any(_is_unpack(t) for t in value):
                    return tuple(_unpack_args(*(n for n in value)))
                return value
+
+
# Placeholder type standing in for `...` on Python < 3.9.2, where alias
# arguments must be actual types (see _create_concatenate_alias).
class _EllipsisDummy:
    pass
+
+
# <=3.10
def _create_concatenate_alias(origin, parameters):
    # Build the Concatenate[...] alias object.  Before 3.9.2 the arguments
    # had to be real types (no `...`), so _EllipsisDummy is substituted
    # temporarily and the finished alias is patched back afterwards.
    if parameters[-1] is ... and sys.version_info < (3, 9, 2):
        # Hack: Arguments must be types, replace it with one.
        parameters = (*parameters[:-1], _EllipsisDummy)
    if sys.version_info >= (3, 10, 3):
        concatenate = _ConcatenateGenericAlias(origin, parameters,
                                               _typevar_types=(TypeVar, ParamSpec),
                                               _paramspec_tvars=True)
    else:
        concatenate = _ConcatenateGenericAlias(origin, parameters)
    if parameters[-1] is not _EllipsisDummy:
        return concatenate
    # Remove dummy again
    concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ...
                                 for p in concatenate.__args__)
    if sys.version_info < (3, 10):
        # backport needs __args__ adjustment only
        return concatenate
    concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__
                                       if p is not _EllipsisDummy)
    return concatenate
+
+
# <=3.10
@typing._tp_cache
def _concatenate_getitem(self, parameters):
    # Validate the arguments of Concatenate[...] and build the alias;
    # cached with typing._tp_cache like other special-form subscriptions.
    if parameters == ():
        raise TypeError("Cannot take a Concatenate of no types.")
    if not isinstance(parameters, tuple):
        parameters = (parameters,)
    if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
        raise TypeError("The last parameter to Concatenate should be a "
                        "ParamSpec variable or ellipsis.")
    msg = "Concatenate[arg, ...]: each arg must be a type."
    parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]),
                  parameters[-1])
    return _create_concatenate_alias(self, parameters)
+
+
+# 3.11+; Concatenate does not accept ellipsis in 3.10
+# Breakpoint: https://github.com/python/cpython/pull/30969
+if sys.version_info >= (3, 11):
+ Concatenate = typing.Concatenate
+# <=3.10
+else:
+ @_ExtensionsSpecialForm
+ def Concatenate(self, parameters):
+ """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """
+ return _concatenate_getitem(self, parameters)
+
+
# 3.10+
if hasattr(typing, 'TypeGuard'):
    TypeGuard = typing.TypeGuard
# 3.9
else:
    @_ExtensionsSpecialForm
    def TypeGuard(self, parameters):
        """Special typing form used to annotate the return type of a user-defined
        type guard function.  ``TypeGuard`` only accepts a single type argument.
        At runtime, functions marked this way should return a boolean.

        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
        type checkers to determine a more precise type of an expression within a
        program's code flow.  Usually type narrowing is done by analyzing
        conditional code flow and applying the narrowing to a block of code.  The
        conditional expression here is sometimes referred to as a "type guard".

        Sometimes it would be convenient to use a user-defined boolean function
        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
        return type to alert static type checkers to this intention.

        Using  ``-> TypeGuard`` tells the static type checker that for a given
        function:

        1. The return value is a boolean.
        2. If the return value is ``True``, the type of its argument
        is the type inside ``TypeGuard``.

        For example::

            def is_str(val: Union[str, float]):
                # "isinstance" type guard
                if isinstance(val, str):
                    # Type of ``val`` is narrowed to ``str``
                    ...
                else:
                    # Else, type of ``val`` is narrowed to ``float``.
                    ...

        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
        form of ``TypeA`` (it can even be a wider form) and this may lead to
        type-unsafe results.  The main reason is to allow for things like
        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
        a subtype of the former, since ``List`` is invariant.  The responsibility of
        writing type-safe type guards is left to the user.

        ``TypeGuard`` also works with type variables.  For more information, see
        PEP 647 (User-Defined Type Guards).
        """
        # Validate the single argument and wrap it in a generic alias so that
        # TypeGuard[X] round-trips through get_origin()/get_args().
        item = typing._type_check(parameters, f'{self} accepts only a single type.')
        return typing._GenericAlias(self, (item,))
+
+
+# 3.13+
+if hasattr(typing, 'TypeIs'):
+ TypeIs = typing.TypeIs
+# <=3.12
+else:
+ @_ExtensionsSpecialForm
+ def TypeIs(self, parameters):
+ """Special typing form used to annotate the return type of a user-defined
+ type narrower function. ``TypeIs`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeIs[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeIs`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the intersection of the type inside ``TypeIs`` and the argument's
+ previously known type.
+
+ For example::
+
+ def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+ return hasattr(val, '__await__')
+
+ def f(val: Union[int, Awaitable[int]]) -> int:
+ if is_awaitable(val):
+ assert_type(val, Awaitable[int])
+ else:
+ assert_type(val, int)
+
+ ``TypeIs`` also works with type variables. For more information, see
+ PEP 742 (Narrowing types with TypeIs).
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+
+# 3.14+?
+if hasattr(typing, 'TypeForm'):
+ TypeForm = typing.TypeForm
+# <=3.13
+else:
+ class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
+ # TypeForm(X) is equivalent to X but indicates to the type checker
+ # that the object is a TypeForm.
+ def __call__(self, obj, /):
+ return obj
+
+ @_TypeFormForm
+ def TypeForm(self, parameters):
+ """A special form representing the value that results from the evaluation
+ of a type expression. This value encodes the information supplied in the
+ type expression, and it represents the type described by that type expression.
+
+ When used in a type expression, TypeForm describes a set of type form objects.
+ It accepts a single type argument, which must be a valid type expression.
+ ``TypeForm[T]`` describes the set of all type form objects that represent
+ the type T or types that are assignable to T.
+
+ Usage:
+
+ def cast[T](typ: TypeForm[T], value: Any) -> T: ...
+
+ reveal_type(cast(int, "x")) # int
+
+ See PEP 747 for more information.
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+
+
+
if hasattr(typing, "LiteralString"):  # 3.11+
    LiteralString = typing.LiteralString
else:
    @_SpecialForm
    def LiteralString(self, params):
        """Represents an arbitrary literal string.

        Example::

          from typing_extensions import LiteralString

          def query(sql: LiteralString) -> ...:
              ...

          query("SELECT * FROM table")  # ok
          query(f"SELECT * FROM {input()}")  # not ok

        See PEP 675 for details.

        """
        # LiteralString takes no parameters, so subscription is rejected;
        # the name is only meaningful to static type checkers.
        raise TypeError(f"{self} is not subscriptable")
+
+
if hasattr(typing, "Self"):  # 3.11+
    Self = typing.Self
else:
    @_SpecialForm
    def Self(self, params):
        """Used to spell the type of "self" in classes.

        Example::

          from typing import Self

          class ReturnsSelf:
              def parse(self, data: bytes) -> Self:
                  ...
                  return self

        """
        # Self takes no parameters, so subscription is rejected;
        # the name is only meaningful to static type checkers.

        raise TypeError(f"{self} is not subscriptable")
+
+
if hasattr(typing, "Never"):  # 3.11+
    Never = typing.Never
else:
    @_SpecialForm
    def Never(self, params):
        """The bottom type, a type that has no members.

        This can be used to define a function that should never be
        called, or a function that never returns::

            from typing_extensions import Never

            def never_call_me(arg: Never) -> None:
                pass

            def int_or_str(arg: int | str) -> None:
                never_call_me(arg)  # type checker error
                match arg:
                    case int():
                        print("It's an int")
                    case str():
                        print("It's a str")
                    case _:
                        never_call_me(arg)  # ok, arg is of type Never

        """
        # Never takes no parameters, so subscription is rejected;
        # the name is only meaningful to static type checkers.

        raise TypeError(f"{self} is not subscriptable")
+
+
if hasattr(typing, 'Required'):  # 3.11+
    Required = typing.Required
    NotRequired = typing.NotRequired
else:  # <=3.10
    @_ExtensionsSpecialForm
    def Required(self, parameters):
        """A special typing construct to mark a key of a total=False TypedDict
        as required. For example:

            class Movie(TypedDict, total=False):
                title: Required[str]
                year: int

            m = Movie(
                title='The Matrix',  # typechecker error if key is omitted
                year=1999,
            )

        There is no runtime checking that a required key is actually provided
        when instantiating a related TypedDict.
        """
        # Runtime form is a one-argument generic alias; the required-key
        # semantics are enforced only by static type checkers.
        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
        return typing._GenericAlias(self, (item,))

    @_ExtensionsSpecialForm
    def NotRequired(self, parameters):
        """A special typing construct to mark a key of a TypedDict as
        potentially missing. For example:

            class Movie(TypedDict):
                title: str
                year: NotRequired[int]

            m = Movie(
                title='The Matrix',  # typechecker error if key is omitted
                year=1999,
            )
        """
        # Mirror image of Required; same runtime representation.
        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
        return typing._GenericAlias(self, (item,))
+
+
if hasattr(typing, 'ReadOnly'):
    ReadOnly = typing.ReadOnly
else:  # <=3.12
    @_ExtensionsSpecialForm
    def ReadOnly(self, parameters):
        """A special typing construct to mark an item of a TypedDict as read-only.

        For example:

            class Movie(TypedDict):
                title: ReadOnly[str]
                year: int

            def mutate_movie(m: Movie) -> None:
                m["year"] = 1992  # allowed
                m["title"] = "The Matrix"  # typechecker error

        There is no runtime checking for this property.
        """
        # Runtime form is a one-argument generic alias (PEP 705);
        # read-only enforcement happens only in static type checkers.
        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
        return typing._GenericAlias(self, (item,))
+
+
# Docstring installed on the backported Unpack special form (<=3.11 branch).
_UNPACK_DOC = """\
Type unpack operator.

The type unpack operator takes the child types from some container type,
such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
example:

  # For some generic class `Foo`:
  Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]

  Ts = TypeVarTuple('Ts')
  # Specifies that `Bar` is generic in an arbitrary number of types.
  # (Think of `Ts` as a tuple of an arbitrary number of individual
  #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
  #  `Generic[]`.)
  class Bar(Generic[Unpack[Ts]]): ...
  Bar[int]  # Valid
  Bar[int, str]  # Also valid

From Python 3.11, this can also be done using the `*` operator:

    Foo[*tuple[int, str]]
    class Bar(Generic[*Ts]): ...

The operator can also be used along with a `TypedDict` to annotate
`**kwargs` in a function signature. For instance:

  class Movie(TypedDict):
    name: str
    year: int

  # This function expects two keyword arguments - *name* of type `str` and
  # *year* of type `int`.
  def foo(**kwargs: Unpack[Movie]): ...

Note that there is only some runtime checking of this operator. Not
everything the runtime allows may be accepted by static type checkers.

For more information, see PEP 646 and PEP 692.
"""


# PEP 692 changed the repr of Unpack[]
# Breakpoint: https://github.com/python/cpython/pull/104048
if sys.version_info >= (3, 12):
    Unpack = typing.Unpack

    def _is_unpack(obj):
        # On 3.12+ typing.Unpack aliases carry Unpack as their origin.
        return get_origin(obj) is Unpack

else:  # <=3.11
    class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
        def __init__(self, getitem):
            super().__init__(getitem)
            # Attach the shared docstring to the special form object.
            self.__doc__ = _UNPACK_DOC

    class _UnpackAlias(typing._GenericAlias, _root=True):
        if sys.version_info < (3, 11):
            # needed for compatibility with Generic[Unpack[Ts]]
            __class__ = typing.TypeVar

        @property
        def __typing_unpacked_tuple_args__(self):
            # Introspection hook read by typing's substitution machinery:
            # returns the element types when unpacking a tuple alias,
            # or None when the unpacked item is not a tuple alias.
            assert self.__origin__ is Unpack
            assert len(self.__args__) == 1
            arg, = self.__args__
            if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
                if arg.__origin__ is not tuple:
                    raise TypeError("Unpack[...] must be used with a tuple type")
                return arg.__args__
            return None

        @property
        def __typing_is_unpacked_typevartuple__(self):
            # True for Unpack[Ts] where Ts is a TypeVarTuple.
            assert self.__origin__ is Unpack
            assert len(self.__args__) == 1
            return isinstance(self.__args__[0], TypeVarTuple)

        def __getitem__(self, args):
            # Substituting into Unpack[Ts] just yields the substituted args.
            if self.__typing_is_unpacked_typevartuple__:
                return args
            return super().__getitem__(args)

    @_UnpackSpecialForm
    def Unpack(self, parameters):
        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
        return _UnpackAlias(self, (item,))

    def _is_unpack(obj):
        return isinstance(obj, _UnpackAlias)
+
+
+def _unpack_args(*args):
+ newargs = []
+ for arg in args:
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs is not None and (not (subargs and subargs[-1] is ...)):
+ newargs.extend(subargs)
+ else:
+ newargs.append(arg)
+ return newargs
+
+
if _PEP_696_IMPLEMENTED:
    from typing import TypeVarTuple

elif hasattr(typing, "TypeVarTuple"):  # 3.11+

    # Add default parameter - PEP 696
    class TypeVarTuple(metaclass=_TypeVarLikeMeta):
        """Type variable tuple."""

        _backported_typevarlike = typing.TypeVarTuple

        def __new__(cls, name, *, default=NoDefault):
            # Create a real typing.TypeVarTuple and graft PEP 696
            # default support onto it.
            tvt = typing.TypeVarTuple(name)
            _set_default(tvt, default)
            _set_module(tvt)

            def _typevartuple_prepare_subst(alias, args):
                # Substitution hook (assigned to __typing_prepare_subst__
                # below): splits the positional args around the
                # TypeVarTuple's position and applies its default when
                # no args are left for it.
                params = alias.__parameters__
                typevartuple_index = params.index(tvt)
                for param in params[typevartuple_index + 1:]:
                    if isinstance(param, TypeVarTuple):
                        raise TypeError(
                            f"More than one TypeVarTuple parameter in {alias}"
                        )

                alen = len(args)
                plen = len(params)
                left = typevartuple_index
                right = plen - typevartuple_index - 1
                var_tuple_index = None
                fillarg = None
                for k, arg in enumerate(args):
                    if not isinstance(arg, type):
                        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
                        if subargs and len(subargs) == 2 and subargs[-1] is ...:
                            if var_tuple_index is not None:
                                raise TypeError(
                                    "More than one unpacked "
                                    "arbitrary-length tuple argument"
                                )
                            var_tuple_index = k
                            fillarg = subargs[0]
                if var_tuple_index is not None:
                    left = min(left, var_tuple_index)
                    right = min(right, alen - var_tuple_index - 1)
                elif left + right > alen:
                    raise TypeError(f"Too few arguments for {alias};"
                                    f" actual {alen}, expected at least {plen - 1}")
                if left == alen - right and tvt.has_default():
                    replacement = _unpack_args(tvt.__default__)
                else:
                    replacement = args[left: alen - right]

                return (
                    *args[:left],
                    *([fillarg] * (typevartuple_index - left)),
                    replacement,
                    *([fillarg] * (plen - right - left - typevartuple_index - 1)),
                    *args[alen - right:],
                )

            tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
            return tvt

        def __init_subclass__(self, *args, **kwds):
            raise TypeError("Cannot subclass special typing classes")

else:  # <=3.10
    class TypeVarTuple(_DefaultMixin):
        """Type variable tuple.

        Usage::

            Ts = TypeVarTuple('Ts')

        In the same way that a normal type variable is a stand-in for a single
        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
        type such as ``Tuple[int, str]``.

        Type variable tuples can be used in ``Generic`` declarations.
        Consider the following example::

            class Array(Generic[*Ts]): ...

        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
        where ``T1`` and ``T2`` are type variables. To use these type variables
        as type parameters of ``Array``, we must *unpack* the type variable tuple using
        the star operator: ``*Ts``. The signature of ``Array`` then behaves
        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows
        us to parameterise the class with an *arbitrary* number of type parameters.

        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
        This includes class definitions, as shown above, as well as function
        signatures and variable annotations::

            class Array(Generic[*Ts]):

                def __init__(self, shape: Tuple[*Ts]):
                    self._shape: Tuple[*Ts] = shape

                def get_shape(self) -> Tuple[*Ts]:
                    return self._shape

            shape = (Height(480), Width(640))
            x: Array[Height, Width] = Array(shape)
            y = abs(x)  # Inferred type is Array[Height, Width]
            z = x + x   #  ...    is Array[Height, Width]
            x.get_shape()  #  ...    is tuple[Height, Width]

        """

        # Trick Generic __parameters__.
        __class__ = typing.TypeVar

        def __iter__(self):
            # Allows *Ts to expand to the Unpack form.
            yield self.__unpacked__

        def __init__(self, name, *, default=NoDefault):
            self.__name__ = name
            _DefaultMixin.__init__(self, default)

            # for pickling:
            def_mod = _caller()
            if def_mod != 'typing_extensions':
                self.__module__ = def_mod

            self.__unpacked__ = Unpack[self]

        def __repr__(self):
            return self.__name__

        def __hash__(self):
            return object.__hash__(self)

        def __eq__(self, other):
            return self is other

        def __reduce__(self):
            # Pickle by name, like typing.TypeVar does.
            return self.__name__

        def __init_subclass__(self, *args, **kwds):
            if '_root' not in kwds:
                raise TypeError("Cannot subclass special typing classes")
+
+
+if hasattr(typing, "reveal_type"): # 3.11+
+ reveal_type = typing.reveal_type
+else: # <=3.10
+ def reveal_type(obj: T, /) -> T:
+ """Reveal the inferred type of a variable.
+
+ When a static type checker encounters a call to ``reveal_type()``,
+ it will emit the inferred type of the argument::
+
+ x: int = 1
+ reveal_type(x)
+
+ Running a static type checker (e.g., ``mypy``) on this example
+ will produce output similar to 'Revealed type is "builtins.int"'.
+
+ At runtime, the function prints the runtime type of the
+ argument and returns it unchanged.
+
+ """
+ print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
+ return obj
+
+
if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"):  # 3.11+
    _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
else:  # <=3.10
    # Cap on the repr length shown in assert_never() error messages;
    # mirrors the private typing constant reused in the branch above.
    _ASSERT_NEVER_REPR_MAX_LENGTH = 100
+
+
+if hasattr(typing, "assert_never"): # 3.11+
+ assert_never = typing.assert_never
+else: # <=3.10
+ def assert_never(arg: Never, /) -> Never:
+ """Assert to the type checker that a line of code is unreachable.
+
+ Example::
+
+ def int_or_str(arg: int | str) -> None:
+ match arg:
+ case int():
+ print("It's an int")
+ case str():
+ print("It's a str")
+ case _:
+ assert_never(arg)
+
+ If a type checker finds that a call to assert_never() is
+ reachable, it will emit an error.
+
+ At runtime, this throws an exception when called.
+
+ """
+ value = repr(arg)
+ if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+ value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+ raise AssertionError(f"Expected code to be unreachable, but got: {value}")
+
+
+# dataclass_transform exists in 3.11 but lacks the frozen_default parameter
+# Breakpoint: https://github.com/python/cpython/pull/99958
+if sys.version_info >= (3, 12): # 3.12+
+ dataclass_transform = typing.dataclass_transform
+else: # <=3.11
+ def dataclass_transform(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ frozen_default: bool = False,
+ field_specifiers: typing.Tuple[
+ typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
+ ...
+ ] = (),
+ **kwargs: typing.Any,
+ ) -> typing.Callable[[T], T]:
+ """Decorator that marks a function, class, or metaclass as providing
+ dataclass-like behavior.
+
+ Example:
+
+ from typing_extensions import dataclass_transform
+
+ _T = TypeVar("_T")
+
+ # Used on a decorator function
+ @dataclass_transform()
+ def create_model(cls: type[_T]) -> type[_T]:
+ ...
+ return cls
+
+ @create_model
+ class CustomerModel:
+ id: int
+ name: str
+
+ # Used on a base class
+ @dataclass_transform()
+ class ModelBase: ...
+
+ class CustomerModel(ModelBase):
+ id: int
+ name: str
+
+ # Used on a metaclass
+ @dataclass_transform()
+ class ModelMeta(type): ...
+
+ class ModelBase(metaclass=ModelMeta): ...
+
+ class CustomerModel(ModelBase):
+ id: int
+ name: str
+
+ Each of the ``CustomerModel`` classes defined in this example will now
+ behave similarly to a dataclass created with the ``@dataclasses.dataclass``
+ decorator. For example, the type checker will synthesize an ``__init__``
+ method.
+
+ The arguments to this decorator can be used to customize this behavior:
+ - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
+ True or False if it is omitted by the caller.
+ - ``order_default`` indicates whether the ``order`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``field_specifiers`` specifies a static list of supported classes
+ or functions that describe fields, similar to ``dataclasses.field()``.
+
+ At runtime, this decorator records its arguments in the
+ ``__dataclass_transform__`` attribute on the decorated object.
+
+ See PEP 681 for details.
+
+ """
+ def decorator(cls_or_fn):
+ cls_or_fn.__dataclass_transform__ = {
+ "eq_default": eq_default,
+ "order_default": order_default,
+ "kw_only_default": kw_only_default,
+ "frozen_default": frozen_default,
+ "field_specifiers": field_specifiers,
+ "kwargs": kwargs,
+ }
+ return cls_or_fn
+ return decorator
+
+
if hasattr(typing, "override"):  # 3.12+
    override = typing.override
else:  # <=3.11
    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])

    def override(arg: _F, /) -> _F:
        """Mark a method as intentionally overriding a base-class method.

        Per PEP 698, a type checker verifies that a method decorated with
        ``@override`` really does override something in a base class::

            class Base:
                def method(self) -> None:
                    pass

            class Child(Base):
                @override
                def method(self) -> None:
                    super().method()

        This guards against renames in the parent that would silently
        orphan the child method.  There is no runtime checking; the
        decorator merely sets ``__override__ = True`` on the decorated
        object (when possible) for introspection and returns it
        unchanged.
        """
        try:
            arg.__override__ = True
        except (AttributeError, TypeError):
            # Objects with __slots__, read-only properties, or
            # builtin/extension types reject attribute writes; PEP 698
            # says to skip the marker silently in that case.
            pass
        return arg
+
+
# Python 3.13.3+ contains a fix for the wrapped __new__
# Breakpoint: https://github.com/python/cpython/pull/132160
if sys.version_info >= (3, 13, 3):
    deprecated = warnings.deprecated
else:
    _T = typing.TypeVar("_T")

    class deprecated:
        """Indicate that a class, function or overload is deprecated.

        When this decorator is applied to an object, the type checker
        will generate a diagnostic on usage of the deprecated object.

        Usage:

            @deprecated("Use B instead")
            class A:
                pass

            @deprecated("Use g instead")
            def f():
                pass

            @overload
            @deprecated("int support is deprecated")
            def g(x: int) -> int: ...
            @overload
            def g(x: str) -> int: ...

        The warning specified by *category* will be emitted at runtime
        on use of deprecated objects. For functions, that happens on calls;
        for classes, on instantiation and on creation of subclasses.
        If the *category* is ``None``, no warning is emitted at runtime.
        The *stacklevel* determines where the
        warning is emitted. If it is ``1`` (the default), the warning
        is emitted at the direct caller of the deprecated object; if it
        is higher, it is emitted further up the stack.
        Static type checker behavior is not affected by the *category*
        and *stacklevel* arguments.

        The deprecation message passed to the decorator is saved in the
        ``__deprecated__`` attribute on the decorated object.
        If applied to an overload, the decorator
        must be after the ``@overload`` decorator for the attribute to
        exist on the overload as returned by ``get_overloads()``.

        See PEP 702 for details.

        """
        def __init__(
            self,
            message: str,
            /,
            *,
            category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
            stacklevel: int = 1,
        ) -> None:
            if not isinstance(message, str):
                raise TypeError(
                    "Expected an object of type str for 'message', not "
                    f"{type(message).__name__!r}"
                )
            self.message = message
            self.category = category
            self.stacklevel = stacklevel

        def __call__(self, arg: _T, /) -> _T:
            # Make sure the inner functions created below don't
            # retain a reference to self.
            msg = self.message
            category = self.category
            stacklevel = self.stacklevel
            if category is None:
                # No runtime warning requested: only record the message.
                arg.__deprecated__ = msg
                return arg
            elif isinstance(arg, type):
                # Decorating a class: wrap __new__ so instantiation warns,
                # and __init_subclass__ so subclassing warns.
                import functools
                from types import MethodType

                original_new = arg.__new__

                @functools.wraps(original_new)
                def __new__(cls, /, *args, **kwargs):
                    # Warn only when the deprecated class itself is
                    # instantiated, not its (non-deprecated) subclasses.
                    if cls is arg:
                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
                    if original_new is not object.__new__:
                        return original_new(cls, *args, **kwargs)
                    # Mirrors a similar check in object.__new__.
                    elif cls.__init__ is object.__init__ and (args or kwargs):
                        raise TypeError(f"{cls.__name__}() takes no arguments")
                    else:
                        return original_new(cls)

                arg.__new__ = staticmethod(__new__)

                original_init_subclass = arg.__init_subclass__
                # We need slightly different behavior if __init_subclass__
                # is a bound method (likely if it was implemented in Python)
                if isinstance(original_init_subclass, MethodType):
                    original_init_subclass = original_init_subclass.__func__

                    @functools.wraps(original_init_subclass)
                    def __init_subclass__(*args, **kwargs):
                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
                        return original_init_subclass(*args, **kwargs)

                    arg.__init_subclass__ = classmethod(__init_subclass__)
                # Or otherwise, which likely means it's a builtin such as
                # object's implementation of __init_subclass__.
                else:
                    @functools.wraps(original_init_subclass)
                    def __init_subclass__(*args, **kwargs):
                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
                        return original_init_subclass(*args, **kwargs)

                    arg.__init_subclass__ = __init_subclass__

                arg.__deprecated__ = __new__.__deprecated__ = msg
                __init_subclass__.__deprecated__ = msg
                return arg
            elif callable(arg):
                # Decorating a plain callable: warn on every call, keeping
                # coroutine-function status intact for async callables.
                import asyncio.coroutines
                import functools
                import inspect

                @functools.wraps(arg)
                def wrapper(*args, **kwargs):
                    warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
                    return arg(*args, **kwargs)

                if asyncio.coroutines.iscoroutinefunction(arg):
                    # Breakpoint: https://github.com/python/cpython/pull/99247
                    if sys.version_info >= (3, 12):
                        wrapper = inspect.markcoroutinefunction(wrapper)
                    else:
                        wrapper._is_coroutine = asyncio.coroutines._is_coroutine

                arg.__deprecated__ = wrapper.__deprecated__ = msg
                return wrapper
            else:
                raise TypeError(
                    "@deprecated decorator with non-None category must be applied to "
                    f"a class or callable, not {arg!r}"
                )
+
# Breakpoint: https://github.com/python/cpython/pull/23702
if sys.version_info < (3, 10):
    def _is_param_expr(arg):
        """Return True if *arg* can stand for a Callable parameter list.

        On <3.10 only the typing_extensions _ConcatenateGenericAlias
        backport needs to be recognised.
        """
        return arg is ... or isinstance(
            arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias)
        )
else:
    def _is_param_expr(arg):
        """Return True if *arg* can stand for a Callable parameter list.

        On 3.10+ typing ships its own _ConcatenateGenericAlias, so both
        it and the typing_extensions backport are accepted.
        """
        return arg is ... or isinstance(
            arg,
            (
                tuple,
                list,
                ParamSpec,
                _ConcatenateGenericAlias,
                typing._ConcatenateGenericAlias,
            ),
        )
+
+
# We have to do some monkey patching to deal with the dual nature of
# Unpack/TypeVarTuple:
# - We want Unpack to be a kind of TypeVar so it gets accepted in
#   Generic[Unpack[Ts]]
# - We want it to *not* be treated as a TypeVar for the purposes of
#   counting generic parameters, so that when we subscript a generic,
#   the runtime doesn't try to substitute the Unpack with the subscripted type.
if not hasattr(typing, "TypeVarTuple"):
    def _check_generic(cls, parameters, elen=_marker):
        """Check correct count for parameters of a generic cls (internal helper).

        This gives a nice error message in case of count mismatch.
        """
        # If substituting a single ParamSpec with multiple arguments
        # we do not check the count
        if (inspect.isclass(cls) and issubclass(cls, typing.Generic)
                and len(cls.__parameters__) == 1
                and isinstance(cls.__parameters__[0], ParamSpec)
                and parameters
                and not _is_param_expr(parameters[0])
        ):
            # Generic modifies parameters variable, but here we cannot do this
            return

        if not elen:
            raise TypeError(f"{cls} is not a generic class")
        if elen is _marker:
            # elen not supplied by the caller: derive it from the class.
            if not hasattr(cls, "__parameters__") or not cls.__parameters__:
                raise TypeError(f"{cls} is not a generic class")
            elen = len(cls.__parameters__)
        alen = len(parameters)
        if alen != elen:
            expect_val = elen
            if hasattr(cls, "__parameters__"):
                # Unpack forms do not count towards the expected total.
                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
                num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
                if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
                    return

                # deal with TypeVarLike defaults
                # required TypeVarLikes cannot appear after a defaulted one.
                if alen < elen:
                    # since we validate TypeVarLike default in _collect_type_vars
                    # or _collect_parameters we can safely check parameters[alen]
                    if (
                        getattr(parameters[alen], '__default__', NoDefault)
                        is not NoDefault
                    ):
                        return

                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
                                         is not NoDefault for p in parameters)

                    elen -= num_default_tv

                    expect_val = f"at least {elen}"

            # Breakpoint: https://github.com/python/cpython/pull/27515
            things = "arguments" if sys.version_info >= (3, 10) else "parameters"
            raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
                            f" for {cls}; actual {alen}, expected {expect_val}")
else:
    # Python 3.11+

    def _check_generic(cls, parameters, elen):
        """Check correct count for parameters of a generic cls (internal helper).

        This gives a nice error message in case of count mismatch.
        """
        if not elen:
            raise TypeError(f"{cls} is not a generic class")
        alen = len(parameters)
        if alen != elen:
            expect_val = elen
            if hasattr(cls, "__parameters__"):
                # Unpack forms do not count towards the expected total.
                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]

                # deal with TypeVarLike defaults
                # required TypeVarLikes cannot appear after a defaulted one.
                if alen < elen:
                    # since we validate TypeVarLike default in _collect_type_vars
                    # or _collect_parameters we can safely check parameters[alen]
                    if (
                        getattr(parameters[alen], '__default__', NoDefault)
                        is not NoDefault
                    ):
                        return

                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
                                         is not NoDefault for p in parameters)

                    elen -= num_default_tv

                    expect_val = f"at least {elen}"

            raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
                            f" for {cls}; actual {alen}, expected {expect_val}")

if not _PEP_696_IMPLEMENTED:
    typing._check_generic = _check_generic
+
+
def _has_generic_or_protocol_as_origin() -> bool:
    """Return True when the typing-internal caller two frames up is
    substituting into ``Generic[...]`` or ``Protocol[...]``.

    Inspects ``sys._getframe(2)`` directly (the frame depth matters, so
    this logic must stay inline).  Any failure to introspect errs on the
    side of returning False, i.e. leniency.
    """
    try:
        caller = sys._getframe(2)
    except (AttributeError, ValueError):
        # AttributeError: not every Python implementation has
        # sys._getframe(); ValueError: the call stack is too shallow
        # because we were invoked from an unexpected module.
        return False
    # Only trust the frame if we really were invoked from typing.py.
    if caller.f_globals.get("__name__") != "typing":
        return False
    origin = caller.f_locals.get("origin")
    # Identity comparisons: origin may be an object with a buggy __eq__
    # that raises, so "in" on a collection is not safe here.
    return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
# Both the typing_extensions TypeVarTuple and (when typing provides one)
# the stdlib class; the getattr default None just pads the set harmlessly
# on versions without typing.TypeVarTuple.
_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}


def _is_unpacked_typevartuple(x) -> bool:
    """Return True if *x* is ``Unpack[Ts]`` for a single TypeVarTuple Ts."""
    if get_origin(x) is not Unpack:
        return False
    args = get_args(x)
    return (
        bool(args)
        and len(args) == 1
        and type(args[0]) in _TYPEVARTUPLE_TYPES
    )
+
+
# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
if hasattr(typing, '_collect_type_vars'):
    def _collect_type_vars(types, typevar_types=None):
        """Collect all type variable contained in types in order of
        first appearance (lexicographic order). For example::

            _collect_type_vars((T, List[S, T])) == (T, S)
        """
        if typevar_types is None:
            typevar_types = typing.TypeVar
        tvars = []

        # A required TypeVarLike cannot appear after a TypeVarLike with a default
        # if it was a direct call to `Generic[]` or `Protocol[]`
        enforce_default_ordering = _has_generic_or_protocol_as_origin()
        default_encountered = False

        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
        type_var_tuple_encountered = False

        for t in types:
            if _is_unpacked_typevartuple(t):
                type_var_tuple_encountered = True
            elif (
                # Unpack aliases masquerade as TypeVar via __class__, so
                # they must be excluded explicitly here.
                isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias)
                and t not in tvars
            ):
                if enforce_default_ordering:
                    has_default = getattr(t, '__default__', NoDefault) is not NoDefault
                    if has_default:
                        if type_var_tuple_encountered:
                            raise TypeError('Type parameter with a default'
                                            ' follows TypeVarTuple')
                        default_encountered = True
                    elif default_encountered:
                        raise TypeError(f'Type parameter {t!r} without a default'
                                        ' follows type parameter with a default')

                tvars.append(t)
            if _should_collect_from_parameters(t):
                tvars.extend([t for t in t.__parameters__ if t not in tvars])
            elif isinstance(t, tuple):
                # Collect nested type_vars
                # tuple wrapped by _prepare_paramspec_params(cls, params)
                for x in t:
                    for collected in _collect_type_vars([x]):
                        if collected not in tvars:
                            tvars.append(collected)
        return tuple(tvars)

    typing._collect_type_vars = _collect_type_vars
else:
    def _collect_parameters(args):
        """Collect all type variables and parameter specifications in args
        in order of first appearance (lexicographic order).

        For example::

            assert _collect_parameters((T, Callable[P, T])) == (T, P)
        """
        parameters = []

        # A required TypeVarLike cannot appear after a TypeVarLike with default
        # if it was a direct call to `Generic[]` or `Protocol[]`
        enforce_default_ordering = _has_generic_or_protocol_as_origin()
        default_encountered = False

        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
        type_var_tuple_encountered = False

        for t in args:
            if isinstance(t, type):
                # We don't want __parameters__ descriptor of a bare Python class.
                pass
            elif isinstance(t, tuple):
                # `t` might be a tuple, when `ParamSpec` is substituted with
                # `[T, int]`, or `[int, *Ts]`, etc.
                for x in t:
                    for collected in _collect_parameters([x]):
                        if collected not in parameters:
                            parameters.append(collected)
            elif hasattr(t, '__typing_subst__'):
                # Genuine type parameters (TypeVar/ParamSpec/...) carry the
                # 3.11+ substitution hook.
                if t not in parameters:
                    if enforce_default_ordering:
                        has_default = (
                            getattr(t, '__default__', NoDefault) is not NoDefault
                        )

                        if type_var_tuple_encountered and has_default:
                            raise TypeError('Type parameter with a default'
                                            ' follows TypeVarTuple')

                        if has_default:
                            default_encountered = True
                        elif default_encountered:
                            raise TypeError(f'Type parameter {t!r} without a default'
                                            ' follows type parameter with a default')

                    parameters.append(t)
            else:
                if _is_unpacked_typevartuple(t):
                    type_var_tuple_encountered = True
                for x in getattr(t, '__parameters__', ()):
                    if x not in parameters:
                        parameters.append(x)

        return tuple(parameters)

    if not _PEP_696_IMPLEMENTED:
        typing._collect_parameters = _collect_parameters
+
# Backport typing.NamedTuple as it exists in Python 3.13.
# In 3.11, the ability to define generic `NamedTuple`s was supported.
# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
# On 3.12, we added __orig_bases__ to call-based NamedTuples
# On 3.13, we deprecated kwargs-based NamedTuples
# Breakpoint: https://github.com/python/cpython/pull/105609
if sys.version_info >= (3, 13):
    NamedTuple = typing.NamedTuple
else:
    def _make_nmtuple(name, types, module, defaults=()):
        """Build the underlying collections.namedtuple class for *name*.

        *types* is an iterable of (field_name, annotation) pairs; the
        annotations are type-checked and attached to both the class and
        its __new__ so typing.get_type_hints() can find them.
        """
        fields = [n for n, t in types]
        annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
                       for n, t in types}
        nm_tpl = collections.namedtuple(name, fields,
                                        defaults=defaults, module=module)
        nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
        return nm_tpl

    # Names users may not override in a NamedTuple class body, and names
    # that are handled specially rather than copied onto the class.
    _prohibited_namedtuple_fields = typing._prohibited
    _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
+
    class _NamedTupleMeta(type):
        """Metaclass that converts a NamedTuple class body into a real
        collections.namedtuple subclass, mirroring CPython 3.13 behavior."""

        def __new__(cls, typename, bases, ns):
            assert _NamedTuple in bases
            for base in bases:
                if base is not _NamedTuple and base is not typing.Generic:
                    raise TypeError(
                        'can only inherit from a NamedTuple type and Generic')
            # Swap the _NamedTuple marker base for the real tuple base.
            bases = tuple(tuple if base is _NamedTuple else base for base in bases)
            if "__annotations__" in ns:
                types = ns["__annotations__"]
            elif "__annotate__" in ns:
                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
                types = ns["__annotate__"](1)
            else:
                types = {}
            default_names = []
            for field_name in types:
                if field_name in ns:
                    default_names.append(field_name)
                elif default_names:
                    raise TypeError(f"Non-default namedtuple field {field_name} "
                                    f"cannot follow default field"
                                    f"{'s' if len(default_names) > 1 else ''} "
                                    f"{', '.join(default_names)}")
            nm_tpl = _make_nmtuple(
                typename, types.items(),
                defaults=[ns[n] for n in default_names],
                module=ns['__module__']
            )
            nm_tpl.__bases__ = bases
            if typing.Generic in bases:
                if hasattr(typing, '_generic_class_getitem'):  # 3.12+
                    nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
                else:
                    class_getitem = typing.Generic.__class_getitem__.__func__
                    nm_tpl.__class_getitem__ = classmethod(class_getitem)
            # update from user namespace without overriding special namedtuple attributes
            for key, val in ns.items():
                if key in _prohibited_namedtuple_fields:
                    raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
                elif key not in _special_namedtuple_fields:
                    if key not in nm_tpl._fields:
                        setattr(nm_tpl, key, ns[key])
                    # Emulate type.__new__'s __set_name__ protocol for
                    # descriptors defined in the class body.
                    try:
                        set_name = type(val).__set_name__
                    except AttributeError:
                        pass
                    else:
                        try:
                            set_name(val, nm_tpl, key)
                        except BaseException as e:
                            msg = (
                                f"Error calling __set_name__ on {type(val).__name__!r} "
                                f"instance {key!r} in {typename!r}"
                            )
                            # BaseException.add_note() existed on py311,
                            # but the __set_name__ machinery didn't start
                            # using add_note() until py312.
                            # Making sure exceptions are raised in the same way
                            # as in "normal" classes seems most important here.
                            # Breakpoint: https://github.com/python/cpython/pull/95915
                            if sys.version_info >= (3, 12):
                                e.add_note(msg)
                                raise
                            else:
                                raise RuntimeError(msg) from e

            if typing.Generic in bases:
                nm_tpl.__init_subclass__()
            return nm_tpl
+
    # The hidden base injected via __mro_entries__.  Created with a direct
    # type.__new__ call to bypass _NamedTupleMeta.__new__, whose assertion
    # (_NamedTuple in bases) cannot hold while _NamedTuple itself is built.
    _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})

    def _namedtuple_mro_entries(bases):
        # PEP 560 hook: substitute the real base for the NamedTuple marker.
        assert NamedTuple in bases
        return (_NamedTuple,)
+
    def NamedTuple(typename, fields=_marker, /, **kwargs):
        """Typed version of namedtuple.

        Usage::

            class Employee(NamedTuple):
                name: str
                id: int

        This is equivalent to::

            Employee = collections.namedtuple('Employee', ['name', 'id'])

        The resulting class has an extra __annotations__ attribute, giving a
        dict that maps field names to types.  (The field names are also in
        the _fields attribute, which is part of the namedtuple API.)
        An alternative equivalent functional syntax is also accepted::

            Employee = NamedTuple('Employee', [('name', str), ('id', int)])
        """
        # _marker distinguishes "fields argument omitted" from an explicit
        # None; both are deprecated spellings handled below.
        if fields is _marker:
            if kwargs:
                deprecated_thing = "Creating NamedTuple classes using keyword arguments"
                deprecation_msg = (
                    "{name} is deprecated and will be disallowed in Python {remove}. "
                    "Use the class-based or functional syntax instead."
                )
            else:
                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
                example = f"`{typename} = NamedTuple({typename!r}, [])`"
                deprecation_msg = (
                    "{name} is deprecated and will be disallowed in Python {remove}. "
                    "To create a NamedTuple class with 0 fields "
                    "using the functional syntax, "
                    "pass an empty list, e.g. "
                ) + example + "."
        elif fields is None:
            if kwargs:
                raise TypeError(
                    "Cannot pass `None` as the 'fields' parameter "
                    "and also specify fields using keyword arguments"
                )
            else:
                deprecated_thing = "Passing `None` as the 'fields' parameter"
                example = f"`{typename} = NamedTuple({typename!r}, [])`"
                deprecation_msg = (
                    "{name} is deprecated and will be disallowed in Python {remove}. "
                    "To create a NamedTuple class with 0 fields "
                    "using the functional syntax, "
                    "pass an empty list, e.g. "
                ) + example + "."
        elif kwargs:
            raise TypeError("Either list of fields or keywords"
                            " can be provided to NamedTuple, not both")
        if fields is _marker or fields is None:
            # Both deprecated paths fall back to kwargs (possibly empty).
            warnings.warn(
                deprecation_msg.format(name=deprecated_thing, remove="3.15"),
                DeprecationWarning,
                stacklevel=2,
            )
            fields = kwargs.items()
        nt = _make_nmtuple(typename, fields, module=_caller())
        nt.__orig_bases__ = (NamedTuple,)
        return nt

    # Route class-based uses (class X(NamedTuple): ...) through the metaclass.
    NamedTuple.__mro_entries__ = _namedtuple_mro_entries
+
+
if hasattr(collections.abc, "Buffer"):
    Buffer = collections.abc.Buffer
else:
    class Buffer(abc.ABC):  # noqa: B024
        """Base class for classes that implement the buffer protocol.

        The buffer protocol allows Python objects to expose a low-level
        memory buffer interface. Before Python 3.12, it is not possible
        to implement the buffer protocol in pure Python code, or even
        to check whether a class implements the buffer protocol. In
        Python 3.12 and higher, the ``__buffer__`` method allows access
        to the buffer protocol from Python code, and the
        ``collections.abc.Buffer`` ABC allows checking whether a class
        implements the buffer protocol.

        To indicate support for the buffer protocol in earlier versions,
        inherit from this ABC, either in a stub file or at runtime,
        or use ABC registration. This ABC provides no methods, because
        there is no Python-accessible methods shared by pre-3.12 buffer
        classes. It is useful primarily for static checks.

        """

    # As a courtesy, register the most common stdlib buffer classes,
    # so isinstance(x, Buffer) succeeds for them out of the box.
    Buffer.register(memoryview)
    Buffer.register(bytearray)
    Buffer.register(bytes)
+
+
# Backport of types.get_original_bases, available on 3.12+ in CPython
if hasattr(_types, "get_original_bases"):
    get_original_bases = _types.get_original_bases
else:
    def get_original_bases(cls, /):
        """Return the class's "original" bases prior to modification by `__mro_entries__`.

        Examples::

            from typing import TypeVar, Generic
            from typing_extensions import NamedTuple, TypedDict

            T = TypeVar("T")
            class Foo(Generic[T]): ...
            class Bar(Foo[int], float): ...
            class Baz(list[str]): ...
            Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
            Spam = TypedDict("Spam", {"a": int, "b": str})

            assert get_original_bases(Bar) == (Foo[int], float)
            assert get_original_bases(Baz) == (list[str],)
            assert get_original_bases(Eggs) == (NamedTuple,)
            assert get_original_bases(Spam) == (TypedDict,)
            assert get_original_bases(int) == (object,)
        """
        try:
            # __orig_bases__ is stashed in the class namespace when
            # __mro_entries__ rewrote the bases; otherwise the runtime
            # bases are the original ones.
            declared = cls.__dict__.get("__orig_bases__", cls.__bases__)
        except AttributeError:
            # Non-class arguments lack __dict__/__bases__.
            raise TypeError(
                f'Expected an instance of type, not {type(cls).__name__!r}'
            ) from None
        return declared
+
+
# NewType is a class on Python 3.10+, making it pickleable
# The error message for subclassing instances of NewType was improved on 3.11+
# Breakpoint: https://github.com/python/cpython/pull/30268
if sys.version_info >= (3, 11):
    NewType = typing.NewType
else:
    class NewType:
        """NewType creates simple unique types with almost zero
        runtime overhead. NewType(name, tp) is considered a subtype of tp
        by static type checkers. At runtime, NewType(name, tp) returns
        a dummy callable that simply returns its argument. Usage::
            UserId = NewType('UserId', int)
            def name_by_id(user_id: UserId) -> str:
                ...
            UserId('user')          # Fails type check
            name_by_id(42)          # Fails type check
            name_by_id(UserId(42))  # OK
            num = UserId(5) + 1     # type: int
        """

        def __call__(self, obj, /):
            # The runtime "constructor" is the identity function.
            return obj

        def __init__(self, name, tp):
            self.__qualname__ = name
            if '.' in name:
                name = name.rpartition('.')[-1]
            self.__name__ = name
            self.__supertype__ = tp
            def_mod = _caller()
            if def_mod != 'typing_extensions':
                self.__module__ = def_mod

        def __mro_entries__(self, bases):
            # We defined __mro_entries__ to get a better error message
            # if a user attempts to subclass a NewType instance. bpo-46170
            supercls_name = self.__name__

            class Dummy:
                def __init_subclass__(cls):
                    subcls_name = cls.__name__
                    raise TypeError(
                        f"Cannot subclass an instance of NewType. "
                        f"Perhaps you were looking for: "
                        f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
                    )

            return (Dummy,)

        def __repr__(self):
            return f'{self.__module__}.{self.__qualname__}'

        def __reduce__(self):
            # Pickle by name: returning a string tells pickle to look the
            # object up by qualified name in its module.
            return self.__qualname__

        # Breakpoint: https://github.com/python/cpython/pull/21515
        if sys.version_info >= (3, 10):
            # PEP 604 methods
            # It doesn't make sense to have these methods on Python <3.10

            def __or__(self, other):
                return typing.Union[self, other]

            def __ror__(self, other):
                return typing.Union[other, self]
+
+
# Breakpoint: https://github.com/python/cpython/pull/124795
if sys.version_info >= (3, 14):
    TypeAliasType = typing.TypeAliasType
# <=3.13
else:
    # Breakpoint: https://github.com/python/cpython/pull/103764
    if sys.version_info >= (3, 12):
        # 3.12-3.13
        def _is_unionable(obj):
            """Corresponds to is_unionable() in unionobject.c in CPython."""
            # On 3.12-3.13 both the stdlib TypeAliasType and this module's
            # backport must be accepted as union members.
            return obj is None or isinstance(obj, (
                type,
                _types.GenericAlias,
                _types.UnionType,
                typing.TypeAliasType,
                TypeAliasType,
            ))
    else:
        # <=3.11
        def _is_unionable(obj):
            """Corresponds to is_unionable() in unionobject.c in CPython."""
            return obj is None or isinstance(obj, (
                type,
                _types.GenericAlias,
                _types.UnionType,
                TypeAliasType,
            ))
+
    if sys.version_info < (3, 10):
        # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582,
        # so that we emulate the behaviour of `types.GenericAlias`
        # on the latest versions of CPython
        _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({
            "__class__",
            "__bases__",
            "__origin__",
            "__args__",
            "__unpacked__",
            "__parameters__",
            "__typing_unpacked_tuple_args__",
            "__mro_entries__",
            "__reduce_ex__",
            "__reduce__",
            "__copy__",
            "__deepcopy__",
        })

        class _TypeAliasGenericAlias(typing._GenericAlias, _root=True):
            """Subscripted-TypeAliasType alias that forwards unknown
            attribute access to the alias's __origin__, except for the
            structural attributes listed above."""

            def __getattr__(self, attr):
                if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS:
                    return object.__getattr__(self, attr)
                return getattr(self.__origin__, attr)
+
+
    class TypeAliasType:
        """Create named, parameterized type aliases.

        This provides a backport of the new `type` statement in Python 3.12:

            type ListOrSet[T] = list[T] | set[T]

        is equivalent to:

            T = TypeVar("T")
            ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))

        The name ListOrSet can then be used as an alias for the type it refers to.

        The type_params argument should contain all the type parameters used
        in the value of the type alias. If the alias is not generic, this
        argument is omitted.

        Static type checkers should only support type aliases declared using
        TypeAliasType that follow these rules:

        - The first argument (the name) must be a string literal.
        - The TypeAliasType instance must be immediately assigned to a variable
          of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
          as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').

        """

        def __init__(self, name: str, value, *, type_params=()):
            if not isinstance(name, str):
                raise TypeError("TypeAliasType name must be a string")
            if not isinstance(type_params, tuple):
                raise TypeError("type_params must be a tuple")
            self.__value__ = value
            self.__type_params__ = type_params

            # Validate PEP 696 ordering: defaulted params must come last.
            default_value_encountered = False
            parameters = []
            for type_param in type_params:
                if (
                    not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec))
                    # <=3.11
                    # Unpack Backport passes isinstance(type_param, TypeVar)
                    or _is_unpack(type_param)
                ):
                    raise TypeError(f"Expected a type param, got {type_param!r}")
                has_default = (
                    getattr(type_param, '__default__', NoDefault) is not NoDefault
                )
                if default_value_encountered and not has_default:
                    raise TypeError(f"non-default type parameter '{type_param!r}'"
                                    " follows default type parameter")
                if has_default:
                    default_value_encountered = True
                if isinstance(type_param, TypeVarTuple):
                    # The TypeVarTuple backport is iterable; flatten it.
                    parameters.extend(type_param)
                else:
                    parameters.append(type_param)
            self.__parameters__ = tuple(parameters)
            def_mod = _caller()
            if def_mod != 'typing_extensions':
                self.__module__ = def_mod
            # Setting this attribute closes the TypeAliasType from further modification
            # (see __setattr__ below) — it must therefore be assigned last.
            self.__name__ = name

        def __setattr__(self, name: str, value: object, /) -> None:
            # Once __name__ exists (end of __init__), the instance is frozen.
            if hasattr(self, "__name__"):
                self._raise_attribute_error(name)
            super().__setattr__(name, value)

        def __delattr__(self, name: str, /) -> Never:
            self._raise_attribute_error(name)

        def _raise_attribute_error(self, name: str) -> Never:
            # Match the Python 3.12 error messages exactly
            if name == "__name__":
                raise AttributeError("readonly attribute")
            elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
                raise AttributeError(
                    f"attribute '{name}' of 'typing.TypeAliasType' objects "
                    "is not writable"
                )
            else:
                raise AttributeError(
                    f"'typing.TypeAliasType' object has no attribute '{name}'"
                )

        def __repr__(self) -> str:
            return self.__name__

        if sys.version_info < (3, 11):
            def _check_single_param(self, param, recursion=0):
                # Allow [], [int], [int, str], [int, ...], [int, T]
                if param is ...:
                    return ...
                if param is None:
                    return None
                # Note in <= 3.9 _ConcatenateGenericAlias inherits from list
                if isinstance(param, list) and recursion == 0:
                    return [self._check_single_param(arg, recursion+1)
                            for arg in param]
                return typing._type_check(
                    param, f'Subscripting {self.__name__} requires a type.'
                )

        def _check_parameters(self, parameters):
            # Type-check each subscript argument before building the alias.
            if sys.version_info < (3, 11):
                return tuple(
                    self._check_single_param(item)
                    for item in parameters
                )
            return tuple(typing._type_check(
                item, f'Subscripting {self.__name__} requires a type.'
            )
                for item in parameters
            )

        def __getitem__(self, parameters):
            if not self.__type_params__:
                raise TypeError("Only generic type aliases are subscriptable")
            if not isinstance(parameters, tuple):
                parameters = (parameters,)
            # Using 3.9 here will create problems with Concatenate
            if sys.version_info >= (3, 10):
                return _types.GenericAlias(self, parameters)
            type_vars = _collect_type_vars(parameters)
            parameters = self._check_parameters(parameters)
            alias = _TypeAliasGenericAlias(self, parameters)
            # alias.__parameters__ is not complete if Concatenate is present
            # as it is converted to a list from which no parameters are extracted.
            if alias.__parameters__ != type_vars:
                alias.__parameters__ = type_vars
            return alias

        def __reduce__(self):
            # Pickle by name, like the 3.12 implementation.
            return self.__name__

        def __init_subclass__(cls, *args, **kwargs):
            raise TypeError(
                "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
            )

        # The presence of this method convinces typing._type_check
        # that TypeAliasTypes are types.
        def __call__(self):
            raise TypeError("Type alias is not callable")

        # Breakpoint: https://github.com/python/cpython/pull/21515
        if sys.version_info >= (3, 10):
            def __or__(self, right):
                # For forward compatibility with 3.12, reject Unions
                # that are not accepted by the built-in Union.
                if not _is_unionable(right):
                    return NotImplemented
                return typing.Union[self, right]

            def __ror__(self, left):
                if not _is_unionable(left):
                    return NotImplemented
                return typing.Union[left, self]
+
+
if hasattr(typing, "is_protocol"):
    is_protocol = typing.is_protocol
    get_protocol_members = typing.get_protocol_members
else:
    def is_protocol(tp: type, /) -> bool:
        """Return True if the given type is a Protocol.

        Example::

            >>> from typing_extensions import Protocol, is_protocol
            >>> class P(Protocol):
            ...     def a(self) -> str: ...
            ...     b: int
            >>> is_protocol(P)
            True
            >>> is_protocol(int)
            False
        """
        # Only classes can be Protocols, and the two Protocol base classes
        # themselves do not count.
        if not isinstance(tp, type):
            return False
        if tp is Protocol or tp is typing.Protocol:
            return False
        return getattr(tp, '_is_protocol', False)

    def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
        """Return the set of members defined in a Protocol.

        Example::

            >>> from typing_extensions import Protocol, get_protocol_members
            >>> class P(Protocol):
            ...     def a(self) -> str: ...
            ...     b: int
            >>> get_protocol_members(P)
            frozenset({'a', 'b'})

        Raise a TypeError for arguments that are not Protocols.
        """
        if not is_protocol(tp):
            raise TypeError(f'{tp!r} is not a Protocol')
        # Prefer the precomputed attribute set when the Protocol class
        # carries one; otherwise compute it from the class body.
        try:
            attrs = tp.__protocol_attrs__
        except AttributeError:
            return frozenset(_get_protocol_attrs(tp))
        return frozenset(attrs)
+
+
if hasattr(typing, "Doc"):
    Doc = typing.Doc
else:
    class Doc:
        """Attach documentation to a type annotation via ``Annotated``.

        Intended for class attributes, function and method parameters,
        return values, and variables.  Pass a positional-only string
        literal so static tools (editors, documentation generators) can
        pick it up; the text is stored on the ``documentation`` attribute.
        This complements docstrings.

        Example::

            >>> from typing_extensions import Annotated, Doc
            >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
        """

        def __init__(self, documentation: str, /) -> None:
            self.documentation = documentation

        def __repr__(self) -> str:
            return f"Doc({self.documentation!r})"

        def __hash__(self) -> int:
            # Hash mirrors equality: both are based on the text alone.
            return hash(self.documentation)

        def __eq__(self, other: object) -> bool:
            if isinstance(other, Doc):
                return self.documentation == other.documentation
            return NotImplemented
+
+
# types.CapsuleType is only exposed directly on newer CPython versions.
_CapsuleType = getattr(_types, "CapsuleType", None)

if _CapsuleType is None:
    # Older interpreters: recover the capsule type from a well-known
    # capsule object (_socket.CAPI), if the _socket module is available.
    try:
        import _socket
    except ImportError:
        pass
    else:
        _CAPI = getattr(_socket, "CAPI", None)
        if _CAPI is not None:
            _CapsuleType = type(_CAPI)

# Only export CapsuleType when we actually managed to obtain the type.
if _CapsuleType is not None:
    CapsuleType = _CapsuleType
    __all__.append("CapsuleType")
+
+
if sys.version_info >= (3, 14):
    from annotationlib import Format, get_annotations
else:
    # Available since Python 3.14.0a3
    # PR: https://github.com/python/cpython/pull/124415
    class Format(enum.IntEnum):
        # Annotation formats (PEP 649/749); see get_annotations() below
        # for the semantics of each member in this backport.
        VALUE = 1
        VALUE_WITH_FAKE_GLOBALS = 2  # internal use only (rejected below)
        FORWARDREF = 3
        STRING = 4
+
    # Available since Python 3.14.0a1
    # PR: https://github.com/python/cpython/pull/119891
    def get_annotations(obj, *, globals=None, locals=None, eval_str=False,
                        format=Format.VALUE):
        """Compute the annotations dict for an object.

        obj may be a callable, class, or module.
        Passing in an object of any other type raises TypeError.

        Returns a dict.  get_annotations() returns a new dict every time
        it's called; calling it twice on the same object will return two
        different but equivalent dicts.

        This is a backport of `inspect.get_annotations`, which has been
        in the standard library since Python 3.10. See the standard library
        documentation for more:

            https://docs.python.org/3/library/inspect.html#inspect.get_annotations

        This backport adds the *format* argument introduced by PEP 649. The
        three formats supported are:
        * VALUE: the annotations are returned as-is. This is the default and
          it is compatible with the behavior on previous Python versions.
        * FORWARDREF: return annotations as-is if possible, but replace any
          undefined names with ForwardRef objects. The implementation proposed by
          PEP 649 relies on language changes that cannot be backported; the
          typing-extensions implementation simply returns the same result as VALUE.
        * STRING: return annotations as strings, in a format close to the original
          source. Again, this behavior cannot be replicated directly in a backport.
          As an approximation, typing-extensions retrieves the annotations under
          VALUE semantics and then stringifies them.

        The purpose of this backport is to allow users who would like to use
        FORWARDREF or STRING semantics once PEP 649 is implemented, but who also
        want to support earlier Python versions, to simply write:

            typing_extensions.get_annotations(obj, format=Format.FORWARDREF)

        """
        format = Format(format)
        if format is Format.VALUE_WITH_FAKE_GLOBALS:
            raise ValueError(
                "The VALUE_WITH_FAKE_GLOBALS format is for internal use only"
            )

        if eval_str and format is not Format.VALUE:
            raise ValueError("eval_str=True is only supported with format=Format.VALUE")

        # Locate the raw annotations and the namespaces to evaluate them in,
        # depending on what kind of object we were given.
        if isinstance(obj, type):
            # class
            obj_dict = getattr(obj, '__dict__', None)
            if obj_dict and hasattr(obj_dict, 'get'):
                ann = obj_dict.get('__annotations__', None)
                if isinstance(ann, _types.GetSetDescriptorType):
                    ann = None
            else:
                ann = None

            obj_globals = None
            module_name = getattr(obj, '__module__', None)
            if module_name:
                module = sys.modules.get(module_name, None)
                if module:
                    obj_globals = getattr(module, '__dict__', None)
            obj_locals = dict(vars(obj))
            unwrap = obj
        elif isinstance(obj, _types.ModuleType):
            # module
            ann = getattr(obj, '__annotations__', None)
            obj_globals = obj.__dict__
            obj_locals = None
            unwrap = None
        elif callable(obj):
            # this includes types.Function, types.BuiltinFunctionType,
            # types.BuiltinMethodType, functools.partial, functools.singledispatch,
            # "class funclike" from Lib/test/test_inspect... on and on it goes.
            ann = getattr(obj, '__annotations__', None)
            obj_globals = getattr(obj, '__globals__', None)
            obj_locals = None
            unwrap = obj
        elif hasattr(obj, '__annotations__'):
            ann = obj.__annotations__
            obj_globals = obj_locals = unwrap = None
        else:
            raise TypeError(f"{obj!r} is not a module, class, or callable.")

        if ann is None:
            return {}

        if not isinstance(ann, dict):
            raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")

        if not ann:
            return {}

        if not eval_str:
            if format is Format.STRING:
                # Stringify eagerly-evaluated values; leave strings as-is.
                return {
                    key: value if isinstance(value, str) else typing._type_repr(value)
                    for key, value in ann.items()
                }
            return dict(ann)

        # eval_str path: unwrap decorated callables so the defining
        # function's globals are used for evaluation.
        if unwrap is not None:
            while True:
                if hasattr(unwrap, '__wrapped__'):
                    unwrap = unwrap.__wrapped__
                    continue
                if isinstance(unwrap, functools.partial):
                    unwrap = unwrap.func
                    continue
                break
            if hasattr(unwrap, "__globals__"):
                obj_globals = unwrap.__globals__

        if globals is None:
            globals = obj_globals
        if locals is None:
            locals = obj_locals or {}

        # "Inject" type parameters into the local namespace
        # (unless they are shadowed by assignments *in* the local namespace),
        # as a way of emulating annotation scopes when calling `eval()`
        if type_params := getattr(obj, "__type_params__", ()):
            locals = {param.__name__: param for param in type_params} | locals

        return_value = {key:
            value if not isinstance(value, str) else eval(value, globals, locals)
            for key, value in ann.items() }
        return return_value
+
+
if hasattr(typing, "evaluate_forward_ref"):
    evaluate_forward_ref = typing.evaluate_forward_ref
else:
    # Implements annotationlib.ForwardRef.evaluate
    def _eval_with_owner(
        forward_ref, *, owner=None, globals=None, locals=None, type_params=None
    ):
        """Evaluate *forward_ref* once, resolving namespaces from *owner*.

        Caches the result on the ForwardRef (__forward_evaluated__ /
        __forward_value__), mirroring typing.ForwardRef behavior.
        """
        if forward_ref.__forward_evaluated__:
            return forward_ref.__forward_value__
        # A closure cell attached to the ref takes precedence over any
        # namespace lookup.
        if getattr(forward_ref, "__cell__", None) is not None:
            try:
                value = forward_ref.__cell__.cell_contents
            except ValueError:
                pass
            else:
                forward_ref.__forward_evaluated__ = True
                forward_ref.__forward_value__ = value
                return value
        if owner is None:
            owner = getattr(forward_ref, "__owner__", None)

        # Globals resolution order: the ref's declaring module, the ref's
        # own __globals__, then the owner's module/globals.
        if (
            globals is None
            and getattr(forward_ref, "__forward_module__", None) is not None
        ):
            globals = getattr(
                sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None
            )
        if globals is None:
            globals = getattr(forward_ref, "__globals__", None)
        if globals is None:
            if isinstance(owner, type):
                module_name = getattr(owner, "__module__", None)
                if module_name:
                    module = sys.modules.get(module_name, None)
                    if module:
                        globals = getattr(module, "__dict__", None)
            elif isinstance(owner, _types.ModuleType):
                globals = getattr(owner, "__dict__", None)
            elif callable(owner):
                globals = getattr(owner, "__globals__", None)

        # If we pass None to eval() below, the globals of this module are used.
        if globals is None:
            globals = {}

        if locals is None:
            locals = {}
            if isinstance(owner, type):
                locals.update(vars(owner))

        if type_params is None and owner is not None:
            # "Inject" type parameters into the local namespace
            # (unless they are shadowed by assignments *in* the local namespace),
            # as a way of emulating annotation scopes when calling `eval()`
            type_params = getattr(owner, "__type_params__", None)

        # Type parameters exist in their own scope, which is logically
        # between the locals and the globals. We simulate this by adding
        # them to the globals.
        if type_params is not None:
            globals = dict(globals)
            for param in type_params:
                globals[param.__name__] = param

        arg = forward_ref.__forward_arg__
        if arg.isidentifier() and not keyword.iskeyword(arg):
            # Fast path for a plain name: avoid eval() entirely.
            if arg in locals:
                value = locals[arg]
            elif arg in globals:
                value = globals[arg]
            elif hasattr(builtins, arg):
                return getattr(builtins, arg)
            else:
                raise NameError(arg)
        else:
            code = forward_ref.__forward_code__
            value = eval(code, globals, locals)
        forward_ref.__forward_evaluated__ = True
        forward_ref.__forward_value__ = value
        return value
+
    def evaluate_forward_ref(
        forward_ref,
        *,
        owner=None,
        globals=None,
        locals=None,
        type_params=None,
        format=None,
        _recursive_guard=frozenset(),
    ):
        """Evaluate a forward reference as a type hint.

        This is similar to calling the ForwardRef.evaluate() method,
        but unlike that method, evaluate_forward_ref() also:

        * Recursively evaluates forward references nested within the type hint.
        * Rejects certain objects that are not valid type hints.
        * Replaces type hints that evaluate to None with types.NoneType.
        * Supports the *FORWARDREF* and *STRING* formats.

        *forward_ref* must be an instance of ForwardRef. *owner*, if given,
        should be the object that holds the annotations that the forward reference
        derived from, such as a module, class object, or function. It is used to
        infer the namespaces to use for looking up names. *globals* and *locals*
        can also be explicitly given to provide the global and local namespaces.
        *type_params* is a tuple of type parameters that are in scope when
        evaluating the forward reference. This parameter must be provided (though
        it may be an empty tuple) if *owner* is not given and the forward reference
        does not already have an owner set. *format* specifies the format of the
        annotation and is a member of the annotationlib.Format enum.

        """
        if format == Format.STRING:
            return forward_ref.__forward_arg__
        if forward_ref.__forward_arg__ in _recursive_guard:
            return forward_ref

        # Evaluate the forward reference
        try:
            value = _eval_with_owner(
                forward_ref,
                owner=owner,
                globals=globals,
                locals=locals,
                type_params=type_params,
            )
        except NameError:
            # Under FORWARDREF semantics an unresolved name is not an error.
            if format == Format.FORWARDREF:
                return forward_ref
            else:
                raise

        if isinstance(value, str):
            value = ForwardRef(value)

        # Recursively evaluate the type
        if isinstance(value, ForwardRef):
            # NOTE: the getattr default of True means a ForwardRef without a
            # __forward_module__ attribute also resets globals here.
            if getattr(value, "__forward_module__", True) is not None:
                globals = None
            return evaluate_forward_ref(
                value,
                globals=globals,
                locals=locals,
                type_params=type_params, owner=owner,
                _recursive_guard=_recursive_guard, format=format
            )
        if sys.version_info < (3, 12, 5) and type_params:
            # Make use of type_params
            locals = dict(locals) if locals else {}
            for tvar in type_params:
                if tvar.__name__ not in locals:  # lets not overwrite something present
                    locals[tvar.__name__] = tvar
        # typing._eval_type grew a positional type_params argument in 3.12.5.
        if sys.version_info < (3, 12, 5):
            return typing._eval_type(
                value,
                globals,
                locals,
                recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
            )
        else:
            return typing._eval_type(
                value,
                globals,
                locals,
                type_params,
                recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
            )
+
+
class Sentinel:
    """A unique sentinel object.

    *name* should be the name of the variable the sentinel is assigned to.

    *repr*, when given, overrides the sentinel's repr; otherwise the repr
    is the name wrapped in angle brackets ("<name>").
    """

    def __init__(
        self,
        name: str,
        repr: typing.Optional[str] = None,
    ):
        self._name = name
        if repr is None:
            self._repr = f'<{name}>'
        else:
            self._repr = repr

    def __repr__(self):
        return self._repr

    if sys.version_info < (3, 11):
        # The presence of this method convinces typing._type_check
        # that Sentinels are types.
        def __call__(self, *args, **kwargs):
            raise TypeError(f"{type(self).__name__!r} object is not callable")

    # Breakpoint: https://github.com/python/cpython/pull/21515
    if sys.version_info >= (3, 10):
        def __or__(self, other):
            return typing.Union[self, other]

        def __ror__(self, other):
            return typing.Union[other, self]

    def __getstate__(self):
        # Sentinels are identity-based; pickling would break uniqueness.
        raise TypeError(f"Cannot pickle {type(self).__name__!r} object")
+
+
if sys.version_info >= (3, 14, 0, "beta"):
    type_repr = annotationlib.type_repr
else:
    def type_repr(value):
        """Convert a Python value to a format suitable for use with the STRING format.

        Helper for tools that support the STRING format but do not have
        access to the code that originally produced the annotations; most
        objects simply go through repr().
        """
        named_kinds = (type, _types.FunctionType, _types.BuiltinFunctionType)
        if isinstance(value, named_kinds):
            module, qualname = value.__module__, value.__qualname__
            # Builtins are spelled bare, everything else fully qualified.
            return qualname if module == "builtins" else f"{module}.{qualname}"
        if value is ...:
            return "..."
        return repr(value)
+
+
# Aliases for items that are in typing in all supported versions.
# We use hasattr() checks so this library will continue to import on
# future versions of Python that may remove these names.
_typing_names = [
    "AbstractSet",
    "AnyStr",
    "BinaryIO",
    "Callable",
    "Collection",
    "Container",
    "Dict",
    "FrozenSet",
    "Hashable",
    "IO",
    "ItemsView",
    "Iterable",
    "Iterator",
    "KeysView",
    "List",
    "Mapping",
    "MappingView",
    "Match",
    "MutableMapping",
    "MutableSequence",
    "MutableSet",
    "Optional",
    "Pattern",
    "Reversible",
    "Sequence",
    "Set",
    "Sized",
    "TextIO",
    "Tuple",
    "Union",
    "ValuesView",
    "cast",
    "no_type_check",
    "no_type_check_decorator",
    # This is private, but it was defined by typing_extensions for a long time
    # and some users rely on it.
    "_AnnotatedAlias",
]
# Re-export every still-existing name from typing into this module's namespace.
globals().update(
    {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)}
)
# These are defined unconditionally because they are used in
# typing-extensions itself.
Generic = typing.Generic
ForwardRef = typing.ForwardRef
Annotated = typing.Annotated
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/METADATA"
new file mode 100644
index 0000000..7225885
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/METADATA"
@@ -0,0 +1,156 @@
+Metadata-Version: 2.4
+Name: urllib3
+Version: 2.6.2
+Summary: HTTP library with thread-safe connection pooling, file post, and more.
+Project-URL: Changelog, https://github.com/urllib3/urllib3/blob/main/CHANGES.rst
+Project-URL: Documentation, https://urllib3.readthedocs.io
+Project-URL: Code, https://github.com/urllib3/urllib3
+Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
+Author-email: Andrey Petrov <andrey.petrov@shazow.net>
+Maintainer-email: Seth Michael Larson <sethmichaellarson@gmail.com>, Quentin Pradet <quentin@pradet.me>, Illia Volochii <illia.volochii@gmail.com>
+License-Expression: MIT
+License-File: LICENSE.txt
+Keywords: filepost,http,httplib,https,pooling,ssl,threadsafe,urllib
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.9
+Provides-Extra: brotli
+Requires-Dist: brotli>=1.2.0; (platform_python_implementation == 'CPython') and extra == 'brotli'
+Requires-Dist: brotlicffi>=1.2.0.0; (platform_python_implementation != 'CPython') and extra == 'brotli'
+Provides-Extra: h2
+Requires-Dist: h2<5,>=4; extra == 'h2'
+Provides-Extra: socks
+Requires-Dist: pysocks!=1.5.7,<2.0,>=1.5.6; extra == 'socks'
+Provides-Extra: zstd
+Requires-Dist: backports-zstd>=1.0.0; (python_version < '3.14') and extra == 'zstd'
+Description-Content-Type: text/markdown
+
+<h1 align="center">
+
+
+
+</h1>
+
+<p align="center">
+ <a href="https://pypi.org/project/urllib3"><img alt="PyPI Version" src="https://img.shields.io/pypi/v/urllib3.svg?maxAge=86400" /></a>
+ <a href="https://pypi.org/project/urllib3"><img alt="Python Versions" src="https://img.shields.io/pypi/pyversions/urllib3.svg?maxAge=86400" /></a>
+ <a href="https://discord.gg/urllib3"><img alt="Join our Discord" src="https://img.shields.io/discord/756342717725933608?color=%237289da&label=discord" /></a>
+ <a href="https://github.com/urllib3/urllib3/actions?query=workflow%3ACI"><img alt="Coverage Status" src="https://img.shields.io/badge/coverage-100%25-success" /></a>
+ <a href="https://github.com/urllib3/urllib3/actions/workflows/ci.yml?query=branch%3Amain"><img alt="Build Status on GitHub" src="https://github.com/urllib3/urllib3/actions/workflows/ci.yml/badge.svg?branch:main&workflow:CI" /></a>
+ <a href="https://urllib3.readthedocs.io"><img alt="Documentation Status" src="https://readthedocs.org/projects/urllib3/badge/?version=latest" /></a><br>
+ <a href="https://deps.dev/pypi/urllib3"><img alt="OpenSSF Scorecard" src="https://api.securityscorecards.dev/projects/github.com/urllib3/urllib3/badge" /></a>
+ <a href="https://slsa.dev"><img alt="SLSA 3" src="https://slsa.dev/images/gh-badge-level3.svg" /></a>
+ <a href="https://bestpractices.coreinfrastructure.org/projects/6227"><img alt="CII Best Practices" src="https://bestpractices.coreinfrastructure.org/projects/6227/badge" /></a>
+</p>
+
+urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
+Python ecosystem already uses urllib3 and you should too.
+urllib3 brings many critical features that are missing from the Python
+standard libraries:
+
+- Thread safety.
+- Connection pooling.
+- Client-side SSL/TLS verification.
+- File uploads with multipart encoding.
+- Helpers for retrying requests and dealing with HTTP redirects.
+- Support for gzip, deflate, brotli, and zstd encoding.
+- Proxy support for HTTP and SOCKS.
+- 100% test coverage.
+
+urllib3 is powerful and easy to use:
+
+```python3
+>>> import urllib3
+>>> resp = urllib3.request("GET", "http://httpbin.org/robots.txt")
+>>> resp.status
+200
+>>> resp.data
+b"User-agent: *\nDisallow: /deny\n"
+```
+
+## Installing
+
+urllib3 can be installed with [pip](https://pip.pypa.io):
+
+```bash
+$ python -m pip install urllib3
+```
+
+Alternatively, you can grab the latest source code from [GitHub](https://github.com/urllib3/urllib3):
+
+```bash
+$ git clone https://github.com/urllib3/urllib3.git
+$ cd urllib3
+$ pip install .
+```
+
+
+## Documentation
+
+urllib3 has usage and reference documentation at [urllib3.readthedocs.io](https://urllib3.readthedocs.io).
+
+
+## Community
+
+urllib3 has a [community Discord channel](https://discord.gg/urllib3) for asking questions and
+collaborating with other contributors. Drop by and say hello 👋
+
+
+## Contributing
+
+urllib3 happily accepts contributions. Please see our
+[contributing documentation](https://urllib3.readthedocs.io/en/latest/contributing.html)
+for some tips on getting started.
+
+
+## Security Disclosures
+
+To report a security vulnerability, please use the
+[Tidelift security contact](https://tidelift.com/security).
+Tidelift will coordinate the fix and disclosure with maintainers.
+
+
+## Maintainers
+
+- Lead: [@illia-v](https://github.com/illia-v) (Illia Volochii)
+- [@sethmlarson](https://github.com/sethmlarson) (Seth M. Larson)
+- [@pquentin](https://github.com/pquentin) (Quentin Pradet)
+- [@theacodes](https://github.com/theacodes) (Thea Flowers)
+- [@haikuginger](https://github.com/haikuginger) (Jess Shapiro)
+- [@lukasa](https://github.com/lukasa) (Cory Benfield)
+- [@sigmavirus24](https://github.com/sigmavirus24) (Ian Stapleton Cordasco)
+- [@shazow](https://github.com/shazow) (Andrey Petrov)
+
+👋
+
+
+## Sponsorship
+
+If your company benefits from this library, please consider [sponsoring its
+development](https://urllib3.readthedocs.io/en/latest/sponsors.html).
+
+
+## For Enterprise
+
+Professional support for urllib3 is available as part of the [Tidelift
+Subscription][1]. Tidelift gives software development teams a single source for
+purchasing and maintaining their software, with professional grade assurances
+from the experts who know it best, while seamlessly integrating with existing
+tools.
+
+[1]: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/RECORD"
new file mode 100644
index 0000000..85ced09
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/RECORD"
@@ -0,0 +1,79 @@
+urllib3-2.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+urllib3-2.6.2.dist-info/METADATA,sha256=Uy2ossezmwF-g3RRsNnJRgYaLsSp4E1BYeSHFmpzGlM,6627
+urllib3-2.6.2.dist-info/RECORD,,
+urllib3-2.6.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+urllib3-2.6.2.dist-info/licenses/LICENSE.txt,sha256=Ew46ZNX91dCWp1JpRjSn2d8oRGnehuVzIQAmgEHj1oY,1093
+urllib3/__init__.py,sha256=JMo1tg1nIV1AeJ2vENC_Txfl0e5h6Gzl9DGVk1rWRbo,6979
+urllib3/__pycache__/__init__.cpython-312.pyc,,
+urllib3/__pycache__/_base_connection.cpython-312.pyc,,
+urllib3/__pycache__/_collections.cpython-312.pyc,,
+urllib3/__pycache__/_request_methods.cpython-312.pyc,,
+urllib3/__pycache__/_version.cpython-312.pyc,,
+urllib3/__pycache__/connection.cpython-312.pyc,,
+urllib3/__pycache__/connectionpool.cpython-312.pyc,,
+urllib3/__pycache__/exceptions.cpython-312.pyc,,
+urllib3/__pycache__/fields.cpython-312.pyc,,
+urllib3/__pycache__/filepost.cpython-312.pyc,,
+urllib3/__pycache__/poolmanager.cpython-312.pyc,,
+urllib3/__pycache__/response.cpython-312.pyc,,
+urllib3/_base_connection.py,sha256=T1cwH3RhzsrBh6Bz3AOGVDboRsE7veijqZPXXQTR2Rg,5568
+urllib3/_collections.py,sha256=UvV7UqtGTSKdvw8N_LxWuEikZLm5gB1zFfTZYH9KhAk,17595
+urllib3/_request_methods.py,sha256=gCeF85SO_UU4WoPwYHIoz_tw-eM_EVOkLFp8OFsC7DA,9931
+urllib3/_version.py,sha256=K_PdMoDM3cnCth9l5Wj8Zgz4tHrD_dXoBfsMuI4VHeQ,704
+urllib3/connection.py,sha256=1ZR2gqfFdIzTYIUwF0K5nftg26hLqU5nr1yHTdKb7WA,42800
+urllib3/connectionpool.py,sha256=ZEhudsa8BIubD2M0XoxBBsjxbsXwMgUScH7oQ9i-j1Y,43371
+urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+urllib3/contrib/__pycache__/__init__.cpython-312.pyc,,
+urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc,,
+urllib3/contrib/__pycache__/socks.cpython-312.pyc,,
+urllib3/contrib/emscripten/__init__.py,sha256=u6KNgzjlFZbuAAXa_ybCR7gQ71VJESnF-IIdDA73brw,733
+urllib3/contrib/emscripten/__pycache__/__init__.cpython-312.pyc,,
+urllib3/contrib/emscripten/__pycache__/connection.cpython-312.pyc,,
+urllib3/contrib/emscripten/__pycache__/fetch.cpython-312.pyc,,
+urllib3/contrib/emscripten/__pycache__/request.cpython-312.pyc,,
+urllib3/contrib/emscripten/__pycache__/response.cpython-312.pyc,,
+urllib3/contrib/emscripten/connection.py,sha256=9lM1TANMxLS13w2dj7IZVLZk0ASiegdmk_cMEon2beo,8885
+urllib3/contrib/emscripten/emscripten_fetch_worker.js,sha256=z1k3zZ4_hDKd3-tN7wzz8LHjHC2pxN_uu8B3k9D9A3c,3677
+urllib3/contrib/emscripten/fetch.py,sha256=5xcd--viFxZd2nBy0aK73dtJ9Tsh1yYZU_SUXwnwibk,23520
+urllib3/contrib/emscripten/request.py,sha256=mL28szy1KvE3NJhWor5jNmarp8gwplDU-7gwGZY5g0Q,566
+urllib3/contrib/emscripten/response.py,sha256=7oVPENYZHuzEGRtG40HonpH5tAIYHsGcHPbJt2Z0U-Y,9507
+urllib3/contrib/pyopenssl.py,sha256=4awTja4o3beTGTGmmWo_3rBoEgzje95Q4bgWz4iiSx8,19724
+urllib3/contrib/socks.py,sha256=eB2eWfu8Wz1fn-qvr_qE_dZAceck2Ncv7XQ15DlvVbU,7547
+urllib3/exceptions.py,sha256=eeQ77nJjF97bP6SvCK4gmx6BpQZKU8yjvM-AIDwZdX8,9952
+urllib3/fields.py,sha256=FCf7UULSkf10cuTRUWTQESzxgl1WT8e2aCy3kfyZins,10829
+urllib3/filepost.py,sha256=U8eNZ-mpKKHhrlbHEEiTxxgK16IejhEa7uz42yqA_dI,2388
+urllib3/http2/__init__.py,sha256=xzrASH7R5ANRkPJOot5lGnATOq3KKuyXzI42rcnwmqs,1741
+urllib3/http2/__pycache__/__init__.cpython-312.pyc,,
+urllib3/http2/__pycache__/connection.cpython-312.pyc,,
+urllib3/http2/__pycache__/probe.cpython-312.pyc,,
+urllib3/http2/connection.py,sha256=WynBX_dr6EMBDz_nw6f3ydmG2IxBTbP3CuqCzD8rByk,12674
+urllib3/http2/probe.py,sha256=nnAkqbhAakOiF75rz7W0udZ38Eeh_uD8fjV74N73FEI,3014
+urllib3/poolmanager.py,sha256=NYP5vkKfadGddaBacUk6z6u8rTP9wgCFGGjVtf1mkcc,23811
+urllib3/py.typed,sha256=UaCuPFa3H8UAakbt-5G8SPacldTOGvJv18pPjUJ5gDY,93
+urllib3/response.py,sha256=ro3KcQPEsnlEwbOP3jykuxHL9eHPnH9MjnflTSClu4o,52736
+urllib3/util/__init__.py,sha256=-qeS0QceivazvBEKDNFCAI-6ACcdDOE4TMvo7SLNlAQ,1001
+urllib3/util/__pycache__/__init__.cpython-312.pyc,,
+urllib3/util/__pycache__/connection.cpython-312.pyc,,
+urllib3/util/__pycache__/proxy.cpython-312.pyc,,
+urllib3/util/__pycache__/request.cpython-312.pyc,,
+urllib3/util/__pycache__/response.cpython-312.pyc,,
+urllib3/util/__pycache__/retry.cpython-312.pyc,,
+urllib3/util/__pycache__/ssl_.cpython-312.pyc,,
+urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc,,
+urllib3/util/__pycache__/ssltransport.cpython-312.pyc,,
+urllib3/util/__pycache__/timeout.cpython-312.pyc,,
+urllib3/util/__pycache__/url.cpython-312.pyc,,
+urllib3/util/__pycache__/util.cpython-312.pyc,,
+urllib3/util/__pycache__/wait.cpython-312.pyc,,
+urllib3/util/connection.py,sha256=JjO722lzHlzLXPTkr9ZWBdhseXnMVjMSb1DJLVrXSnQ,4444
+urllib3/util/proxy.py,sha256=seP8-Q5B6bB0dMtwPj-YcZZQ30vHuLqRu-tI0JZ2fzs,1148
+urllib3/util/request.py,sha256=itpnC8ug7D4nVfDmGUCRMlgkARUQ13r_XMxSnzTwmpE,8363
+urllib3/util/response.py,sha256=vQE639uoEhj1vpjEdxu5lNIhJCSUZkd7pqllUI0BZOA,3374
+urllib3/util/retry.py,sha256=bj-2YUqblxLlv8THg5fxww-DM54XCbjgZXIQ71XioCY,18459
+urllib3/util/ssl_.py,sha256=Y9RNkWCIehDxIRvyFnHUjiMlPolm368GYMya2YdDOag,19929
+urllib3/util/ssl_match_hostname.py,sha256=Di7DU7zokoltapT_F0Sj21ffYxwaS_cE5apOtwueeyA,5845
+urllib3/util/ssltransport.py,sha256=Ez4O8pR_vT8dan_FvqBYS6dgDfBXEMfVfrzcdUoWfi4,8847
+urllib3/util/timeout.py,sha256=4eT1FVeZZU7h7mYD1Jq2OXNe4fxekdNvhoWUkZusRpA,10346
+urllib3/util/url.py,sha256=WRh-TMYXosmgp8m8lT4H5spoHw5yUjlcMCfU53AkoAs,15205
+urllib3/util/util.py,sha256=j3lbZK1jPyiwD34T8IgJzdWEZVT-4E-0vYIJi9UjeNA,1146
+urllib3/util/wait.py,sha256=_ph8IrUR3sqPqi0OopQgJUlH4wzkGeM5CiyA7XGGtmI,4423
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/WHEEL"
new file mode 100644
index 0000000..ae8ec1b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/WHEEL"
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.28.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/licenses/LICENSE.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/licenses/LICENSE.txt"
new file mode 100644
index 0000000..e6183d0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3-2.6.2.dist-info/licenses/LICENSE.txt"
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2008-2020 Andrey Petrov and contributors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/__init__.py"
new file mode 100644
index 0000000..3fe782c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/__init__.py"
@@ -0,0 +1,211 @@
+"""
+Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
+"""
+
+from __future__ import annotations
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+import sys
+import typing
+import warnings
+from logging import NullHandler
+
+from . import exceptions
+from ._base_connection import _TYPE_BODY
+from ._collections import HTTPHeaderDict
+from ._version import __version__
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+from .filepost import _TYPE_FIELDS, encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import BaseHTTPResponse, HTTPResponse
+from .util.request import make_headers
+from .util.retry import Retry
+from .util.timeout import Timeout
+
+# Ensure that Python is compiled with OpenSSL 1.1.1+
+# If the 'ssl' module isn't available at all that's
+# fine, we only care if the module is available.
+try:
+ import ssl
+except ImportError:
+ pass
+else:
+ if not ssl.OPENSSL_VERSION.startswith("OpenSSL "): # Defensive:
+ warnings.warn(
+ "urllib3 v2 only supports OpenSSL 1.1.1+, currently "
+ f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
+ "See: https://github.com/urllib3/urllib3/issues/3020",
+ exceptions.NotOpenSSLWarning,
+ )
+ elif ssl.OPENSSL_VERSION_INFO < (1, 1, 1): # Defensive:
+ raise ImportError(
+ "urllib3 v2 only supports OpenSSL 1.1.1+, currently "
+ f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
+ "See: https://github.com/urllib3/urllib3/issues/2168"
+ )
+
+__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+__license__ = "MIT"
+__version__ = __version__
+
+__all__ = (
+ "HTTPConnectionPool",
+ "HTTPHeaderDict",
+ "HTTPSConnectionPool",
+ "PoolManager",
+ "ProxyManager",
+ "HTTPResponse",
+ "Retry",
+ "Timeout",
+ "add_stderr_logger",
+ "connection_from_url",
+ "disable_warnings",
+ "encode_multipart_formdata",
+ "make_headers",
+ "proxy_from_url",
+ "request",
+ "BaseHTTPResponse",
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(
+ level: int = logging.DEBUG,
+) -> logging.StreamHandler[typing.TextIO]:
+ """
+ Helper for quickly adding a StreamHandler to the logger. Useful for
+ debugging.
+
+ Returns the handler after adding it.
+ """
+ # This method needs to be in this __init__.py to get the __name__ correct
+ # even if urllib3 is vendored within another package.
+ logger = logging.getLogger(__name__)
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+ logger.addHandler(handler)
+ logger.setLevel(level)
+ logger.debug("Added a stderr logging handler to logger: %s", __name__)
+ return handler
+
+
+# ... Clean up.
+del NullHandler
+
+
+# All warning filters *must* be appended unless you're really certain that they
+# shouldn't be: otherwise, it's very hard for users to use most Python
+# mechanisms to silence them.
+# SecurityWarning's always go off by default.
+warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
+# InsecurePlatformWarning's don't vary between requests, so we keep it default.
+warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
+
+
+def disable_warnings(category: type[Warning] = exceptions.HTTPWarning) -> None:
+ """
+ Helper for quickly disabling all urllib3 warnings.
+ """
+ warnings.simplefilter("ignore", category)
+
+
+_DEFAULT_POOL = PoolManager()
+
+
+def request(
+ method: str,
+ url: str,
+ *,
+ body: _TYPE_BODY | None = None,
+ fields: _TYPE_FIELDS | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ preload_content: bool | None = True,
+ decode_content: bool | None = True,
+ redirect: bool | None = True,
+ retries: Retry | bool | int | None = None,
+ timeout: Timeout | float | int | None = 3,
+ json: typing.Any | None = None,
+) -> BaseHTTPResponse:
+ """
+ A convenience, top-level request method. It uses a module-global ``PoolManager`` instance.
+ Therefore, its side effects could be shared across dependencies relying on it.
+ To avoid side effects create a new ``PoolManager`` instance and use it instead.
+ The method does not accept low-level ``**urlopen_kw`` keyword arguments.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param url:
+ The URL to perform the request on.
+
+ :param body:
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
+
+ :param fields:
+ Data to encode and send in the request body.
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc.
+
+ :param bool preload_content:
+ If True, the response's body will be preloaded into memory.
+
+ :param bool decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param redirect:
+ If True, automatically handle redirects (status codes 301, 302,
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
+ will disable redirect, too.
+
+ :param retries:
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one
+ request. It may be a float (in seconds) or an instance of
+ :class:`urllib3.util.Timeout`.
+
+ :param json:
+ Data to encode and send as JSON with UTF-encoded in the request body.
+ The ``"Content-Type"`` header will be set to ``"application/json"``
+ unless specified otherwise.
+ """
+
+ return _DEFAULT_POOL.request(
+ method,
+ url,
+ body=body,
+ fields=fields,
+ headers=headers,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ redirect=redirect,
+ retries=retries,
+ timeout=timeout,
+ json=json,
+ )
+
+
+if sys.platform == "emscripten":
+ from .contrib.emscripten import inject_into_urllib3 # noqa: 401
+
+ inject_into_urllib3()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_base_connection.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_base_connection.py"
new file mode 100644
index 0000000..dc0f318
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_base_connection.py"
@@ -0,0 +1,165 @@
+from __future__ import annotations
+
+import typing
+
+from .util.connection import _TYPE_SOCKET_OPTIONS
+from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
+from .util.url import Url
+
+_TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes], str]
+
+
+class ProxyConfig(typing.NamedTuple):
+ ssl_context: ssl.SSLContext | None
+ use_forwarding_for_https: bool
+ assert_hostname: None | str | typing.Literal[False]
+ assert_fingerprint: str | None
+
+
+class _ResponseOptions(typing.NamedTuple):
+ # TODO: Remove this in favor of a better
+ # HTTP request/response lifecycle tracking.
+ request_method: str
+ request_url: str
+ preload_content: bool
+ decode_content: bool
+ enforce_content_length: bool
+
+
+if typing.TYPE_CHECKING:
+ import ssl
+ from typing import Protocol
+
+ from .response import BaseHTTPResponse
+
+ class BaseHTTPConnection(Protocol):
+ default_port: typing.ClassVar[int]
+ default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]
+
+ host: str
+ port: int
+ timeout: None | (
+ float
+ ) # Instance doesn't store _DEFAULT_TIMEOUT, must be resolved.
+ blocksize: int
+ source_address: tuple[str, int] | None
+ socket_options: _TYPE_SOCKET_OPTIONS | None
+
+ proxy: Url | None
+ proxy_config: ProxyConfig | None
+
+ is_verified: bool
+ proxy_is_verified: bool | None
+
+ def __init__(
+ self,
+ host: str,
+ port: int | None = None,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 8192,
+ socket_options: _TYPE_SOCKET_OPTIONS | None = ...,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ ) -> None: ...
+
+ def set_tunnel(
+ self,
+ host: str,
+ port: int | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ scheme: str = "http",
+ ) -> None: ...
+
+ def connect(self) -> None: ...
+
+ def request(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ # We know *at least* botocore is depending on the order of the
+ # first 3 parameters so to be safe we only mark the later ones
+ # as keyword-only to ensure we have space to extend.
+ *,
+ chunked: bool = False,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ ) -> None: ...
+
+ def getresponse(self) -> BaseHTTPResponse: ...
+
+ def close(self) -> None: ...
+
+ @property
+ def is_closed(self) -> bool:
+ """Whether the connection either is brand new or has been previously closed.
+ If this property is True then both ``is_connected`` and ``has_connected_to_proxy``
+ properties must be False.
+ """
+
+ @property
+ def is_connected(self) -> bool:
+ """Whether the connection is actively connected to any origin (proxy or target)"""
+
+ @property
+ def has_connected_to_proxy(self) -> bool:
+ """Whether the connection has successfully connected to its proxy.
+ This returns False if no proxy is in use. Used to determine whether
+ errors are coming from the proxy layer or from tunnelling to the target origin.
+ """
+
+ class BaseHTTPSConnection(BaseHTTPConnection, Protocol):
+ default_port: typing.ClassVar[int]
+ default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]
+
+ # Certificate verification methods
+ cert_reqs: int | str | None
+ assert_hostname: None | str | typing.Literal[False]
+ assert_fingerprint: str | None
+ ssl_context: ssl.SSLContext | None
+
+ # Trusted CAs
+ ca_certs: str | None
+ ca_cert_dir: str | None
+ ca_cert_data: None | str | bytes
+
+ # TLS version
+ ssl_minimum_version: int | None
+ ssl_maximum_version: int | None
+ ssl_version: int | str | None # Deprecated
+
+ # Client certificates
+ cert_file: str | None
+ key_file: str | None
+ key_password: str | None
+
+ def __init__(
+ self,
+ host: str,
+ port: int | None = None,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 16384,
+ socket_options: _TYPE_SOCKET_OPTIONS | None = ...,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ cert_reqs: int | str | None = None,
+ assert_hostname: None | str | typing.Literal[False] = None,
+ assert_fingerprint: str | None = None,
+ server_hostname: str | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ ca_certs: str | None = None,
+ ca_cert_dir: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ ssl_minimum_version: int | None = None,
+ ssl_maximum_version: int | None = None,
+ ssl_version: int | str | None = None, # Deprecated
+ cert_file: str | None = None,
+ key_file: str | None = None,
+ key_password: str | None = None,
+ ) -> None: ...
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_collections.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_collections.py"
new file mode 100644
index 0000000..0378aab
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_collections.py"
@@ -0,0 +1,487 @@
+from __future__ import annotations
+
+import typing
+from collections import OrderedDict
+from enum import Enum, auto
+from threading import RLock
+
+if typing.TYPE_CHECKING:
+ # We can only import Protocol if TYPE_CHECKING because it's a development
+ # dependency, and is not available at runtime.
+ from typing import Protocol
+
+ from typing_extensions import Self
+
+ class HasGettableStringKeys(Protocol):
+ def keys(self) -> typing.Iterator[str]: ...
+
+ def __getitem__(self, key: str) -> str: ...
+
+
+__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
+
+
+# Key type
+_KT = typing.TypeVar("_KT")
+# Value type
+_VT = typing.TypeVar("_VT")
+# Default type
+_DT = typing.TypeVar("_DT")
+
+ValidHTTPHeaderSource = typing.Union[
+ "HTTPHeaderDict",
+ typing.Mapping[str, str],
+ typing.Iterable[tuple[str, str]],
+ "HasGettableStringKeys",
+]
+
+
+class _Sentinel(Enum):
+ not_passed = auto()
+
+
+def ensure_can_construct_http_header_dict(
+ potential: object,
+) -> ValidHTTPHeaderSource | None:
+ if isinstance(potential, HTTPHeaderDict):
+ return potential
+ elif isinstance(potential, typing.Mapping):
+ # Full runtime checking of the contents of a Mapping is expensive, so for the
+ # purposes of typechecking, we assume that any Mapping is the right shape.
+ return typing.cast(typing.Mapping[str, str], potential)
+ elif isinstance(potential, typing.Iterable):
+ # Similarly to Mapping, full runtime checking of the contents of an Iterable is
+ # expensive, so for the purposes of typechecking, we assume that any Iterable
+ # is the right shape.
+ return typing.cast(typing.Iterable[tuple[str, str]], potential)
+ elif hasattr(potential, "keys") and hasattr(potential, "__getitem__"):
+ return typing.cast("HasGettableStringKeys", potential)
+ else:
+ return None
+
+
+class RecentlyUsedContainer(typing.Generic[_KT, _VT], typing.MutableMapping[_KT, _VT]):
+ """
+ Provides a thread-safe dict-like container which maintains up to
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
+ ``maxsize``.
+
+ :param maxsize:
+ Maximum number of recent elements to retain.
+
+ :param dispose_func:
+ Every time an item is evicted from the container,
+ ``dispose_func(value)`` is called. Callback which will get called
+ """
+
+ _container: typing.OrderedDict[_KT, _VT]
+ _maxsize: int
+ dispose_func: typing.Callable[[_VT], None] | None
+ lock: RLock
+
+ def __init__(
+ self,
+ maxsize: int = 10,
+ dispose_func: typing.Callable[[_VT], None] | None = None,
+ ) -> None:
+ super().__init__()
+ self._maxsize = maxsize
+ self.dispose_func = dispose_func
+ self._container = OrderedDict()
+ self.lock = RLock()
+
+ def __getitem__(self, key: _KT) -> _VT:
+ # Re-insert the item, moving it to the end of the eviction line.
+ with self.lock:
+ item = self._container.pop(key)
+ self._container[key] = item
+ return item
+
+ def __setitem__(self, key: _KT, value: _VT) -> None:
+ evicted_item = None
+ with self.lock:
+ # Possibly evict the existing value of 'key'
+ try:
+ # If the key exists, we'll overwrite it, which won't change the
+ # size of the pool. Because accessing a key should move it to
+ # the end of the eviction line, we pop it out first.
+ evicted_item = key, self._container.pop(key)
+ self._container[key] = value
+ except KeyError:
+ # When the key does not exist, we insert the value first so that
+ # evicting works in all cases, including when self._maxsize is 0
+ self._container[key] = value
+ if len(self._container) > self._maxsize:
+ # If we didn't evict an existing value, and we've hit our maximum
+ # size, then we have to evict the least recently used item from
+ # the beginning of the container.
+ evicted_item = self._container.popitem(last=False)
+
+ # After releasing the lock on the pool, dispose of any evicted value.
+ if evicted_item is not None and self.dispose_func:
+ _, evicted_value = evicted_item
+ self.dispose_func(evicted_value)
+
+ def __delitem__(self, key: _KT) -> None:
+ with self.lock:
+ value = self._container.pop(key)
+
+ if self.dispose_func:
+ self.dispose_func(value)
+
+ def __len__(self) -> int:
+ with self.lock:
+ return len(self._container)
+
+ def __iter__(self) -> typing.NoReturn:
+ raise NotImplementedError(
+ "Iteration over this class is unlikely to be threadsafe."
+ )
+
+ def clear(self) -> None:
+ with self.lock:
+ # Copy pointers to all values, then wipe the mapping
+ values = list(self._container.values())
+ self._container.clear()
+
+ if self.dispose_func:
+ for value in values:
+ self.dispose_func(value)
+
+ def keys(self) -> set[_KT]: # type: ignore[override]
+ with self.lock:
+ return set(self._container.keys())
+
+
+class HTTPHeaderDictItemView(set[tuple[str, str]]):
+ """
+ HTTPHeaderDict is unusual for a Mapping[str, str] in that it has two modes of
+ address.
+
+ If we directly try to get an item with a particular name, we will get a string
+ back that is the concatenated version of all the values:
+
+ >>> d['X-Header-Name']
+ 'Value1, Value2, Value3'
+
+ However, if we iterate over an HTTPHeaderDict's items, we will optionally combine
+ these values based on whether combine=True was called when building up the dictionary
+
+ >>> d = HTTPHeaderDict({"A": "1", "B": "foo"})
+ >>> d.add("A", "2", combine=True)
+ >>> d.add("B", "bar")
+ >>> list(d.items())
+ [
+ ('A', '1, 2'),
+ ('B', 'foo'),
+ ('B', 'bar'),
+ ]
+
+ This class conforms to the interface required by the MutableMapping ABC while
+ also giving us the nonstandard iteration behavior we want; items with duplicate
+ keys, ordered by time of first insertion.
+ """
+
+ _headers: HTTPHeaderDict
+
+ def __init__(self, headers: HTTPHeaderDict) -> None:
+ self._headers = headers
+
+ def __len__(self) -> int:
+ return len(list(self._headers.iteritems()))
+
+ def __iter__(self) -> typing.Iterator[tuple[str, str]]:
+ return self._headers.iteritems()
+
+ def __contains__(self, item: object) -> bool:
+ if isinstance(item, tuple) and len(item) == 2:
+ passed_key, passed_val = item
+ if isinstance(passed_key, str) and isinstance(passed_val, str):
+ return self._headers._has_value_for_header(passed_key, passed_val)
+ return False
+
+
+class HTTPHeaderDict(typing.MutableMapping[str, str]):
+ """
+ :param headers:
+ An iterable of field-value pairs. Must not contain multiple field names
+ when compared case-insensitively.
+
+ :param kwargs:
+ Additional field-value pairs to pass in to ``dict.update``.
+
+ A ``dict`` like container for storing HTTP Headers.
+
+ Field names are stored and compared case-insensitively in compliance with
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
+ case-insensitive pair.
+
+ Using ``__setitem__`` syntax overwrites fields that compare equal
+ case-insensitively in order to maintain ``dict``'s api. For fields that
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+ in a loop.
+
+ If multiple fields that are equal case-insensitively are passed to the
+ constructor or ``.update``, the behavior is undefined and some will be
+ lost.
+
+ >>> headers = HTTPHeaderDict()
+ >>> headers.add('Set-Cookie', 'foo=bar')
+ >>> headers.add('set-cookie', 'baz=quxx')
+ >>> headers['content-length'] = '7'
+ >>> headers['SET-cookie']
+ 'foo=bar, baz=quxx'
+ >>> headers['Content-Length']
+ '7'
+ """
+
+ _container: typing.MutableMapping[str, list[str]]
+
+ def __init__(self, headers: ValidHTTPHeaderSource | None = None, **kwargs: str):
+ super().__init__()
+ self._container = {} # 'dict' is insert-ordered
+ if headers is not None:
+ if isinstance(headers, HTTPHeaderDict):
+ self._copy_from(headers)
+ else:
+ self.extend(headers)
+ if kwargs:
+ self.extend(kwargs)
+
+ def __setitem__(self, key: str, val: str) -> None:
+ # avoid a bytes/str comparison by decoding before httplib
+ if isinstance(key, bytes):
+ key = key.decode("latin-1")
+ self._container[key.lower()] = [key, val]
+
+ def __getitem__(self, key: str) -> str:
+ if isinstance(key, bytes):
+ key = key.decode("latin-1")
+ val = self._container[key.lower()]
+ return ", ".join(val[1:])
+
+ def __delitem__(self, key: str) -> None:
+ if isinstance(key, bytes):
+ key = key.decode("latin-1")
+ del self._container[key.lower()]
+
+ def __contains__(self, key: object) -> bool:
+ if isinstance(key, bytes):
+ key = key.decode("latin-1")
+ if isinstance(key, str):
+ return key.lower() in self._container
+ return False
+
+ def setdefault(self, key: str, default: str = "") -> str:
+ return super().setdefault(key, default)
+
+ def __eq__(self, other: object) -> bool:
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
+ if maybe_constructable is None:
+ return False
+ else:
+ other_as_http_header_dict = type(self)(maybe_constructable)
+
+ return {k.lower(): v for k, v in self.itermerged()} == {
+ k.lower(): v for k, v in other_as_http_header_dict.itermerged()
+ }
+
+ def __ne__(self, other: object) -> bool:
+ return not self.__eq__(other)
+
+ def __len__(self) -> int:
+ return len(self._container)
+
+ def __iter__(self) -> typing.Iterator[str]:
+ # Only provide the originally cased names
+ for vals in self._container.values():
+ yield vals[0]
+
+ def discard(self, key: str) -> None:
+ try:
+ del self[key]
+ except KeyError:
+ pass
+
+ def add(self, key: str, val: str, *, combine: bool = False) -> None:
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
+ exists.
+
+ If this is called with combine=True, instead of adding a new header value
+ as a distinct item during iteration, this will instead append the value to
+ any existing header value with a comma. If no existing header value exists
+ for the key, then the value will simply be added, ignoring the combine parameter.
+
+ >>> headers = HTTPHeaderDict(foo='bar')
+ >>> headers.add('Foo', 'baz')
+ >>> headers['foo']
+ 'bar, baz'
+ >>> list(headers.items())
+ [('foo', 'bar'), ('foo', 'baz')]
+ >>> headers.add('foo', 'quz', combine=True)
+ >>> list(headers.items())
+ [('foo', 'bar, baz, quz')]
+ """
+ # avoid a bytes/str comparison by decoding before httplib
+ if isinstance(key, bytes):
+ key = key.decode("latin-1")
+ key_lower = key.lower()
+ new_vals = [key, val]
+ # Keep the common case aka no item present as fast as possible
+ vals = self._container.setdefault(key_lower, new_vals)
+ if new_vals is not vals:
+ # if there are values here, then there is at least the initial
+ # key/value pair
+ assert len(vals) >= 2
+ if combine:
+ vals[-1] = vals[-1] + ", " + val
+ else:
+ vals.append(val)
+
+ def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None:
+ """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items
+ with self.add instead of self.__setitem__
+ """
+ if len(args) > 1:
+ raise TypeError(
+ f"extend() takes at most 1 positional arguments ({len(args)} given)"
+ )
+ other = args[0] if len(args) >= 1 else ()
+
+ if isinstance(other, HTTPHeaderDict):
+ for key, val in other.iteritems():
+ self.add(key, val)
+ elif isinstance(other, typing.Mapping):
+ for key, val in other.items():
+ self.add(key, val)
+ elif isinstance(other, typing.Iterable):
+ other = typing.cast(typing.Iterable[tuple[str, str]], other)
+ for key, value in other:
+ self.add(key, value)
+ elif hasattr(other, "keys") and hasattr(other, "__getitem__"):
+ # THIS IS NOT A TYPESAFE BRANCH
+ # In this branch, the object has a `keys` attr but is not a Mapping or any of
+ # the other types indicated in the method signature. We do some stuff with
+ # it as though it partially implements the Mapping interface, but we're not
+ # doing that stuff safely AT ALL.
+ for key in other.keys():
+ self.add(key, other[key])
+
+ for key, value in kwargs.items():
+ self.add(key, value)
+
+ @typing.overload
+ def getlist(self, key: str) -> list[str]: ...
+
+ @typing.overload
+ def getlist(self, key: str, default: _DT) -> list[str] | _DT: ...
+
+ def getlist(
+ self, key: str, default: _Sentinel | _DT = _Sentinel.not_passed
+ ) -> list[str] | _DT:
+ """Returns a list of all the values for the named field. Returns an
+ empty list if the key doesn't exist."""
+ if isinstance(key, bytes):
+ key = key.decode("latin-1")
+ try:
+ vals = self._container[key.lower()]
+ except KeyError:
+ if default is _Sentinel.not_passed:
+ # _DT is unbound; empty list is instance of List[str]
+ return []
+ # _DT is bound; default is instance of _DT
+ return default
+ else:
+ # _DT may or may not be bound; vals[1:] is instance of List[str], which
+ # meets our external interface requirement of `Union[List[str], _DT]`.
+ return vals[1:]
+
+ def _prepare_for_method_change(self) -> Self:
+ """
+ Remove content-specific header fields before changing the request
+ method to GET or HEAD according to RFC 9110, Section 15.4.
+ """
+ content_specific_headers = [
+ "Content-Encoding",
+ "Content-Language",
+ "Content-Location",
+ "Content-Type",
+ "Content-Length",
+ "Digest",
+ "Last-Modified",
+ ]
+ for header in content_specific_headers:
+ self.discard(header)
+ return self
+
+ # Backwards compatibility for httplib
+ getheaders = getlist
+ getallmatchingheaders = getlist
+ iget = getlist
+
+ # Backwards compatibility for http.cookiejar
+ get_all = getlist
+
+ def __repr__(self) -> str:
+ return f"{type(self).__name__}({dict(self.itermerged())})"
+
+ def _copy_from(self, other: HTTPHeaderDict) -> None:
+ for key in other:
+ val = other.getlist(key)
+ self._container[key.lower()] = [key, *val]
+
+ def copy(self) -> Self:
+ clone = type(self)()
+ clone._copy_from(self)
+ return clone
+
+ def iteritems(self) -> typing.Iterator[tuple[str, str]]:
+ """Iterate over all header lines, including duplicate ones."""
+ for key in self:
+ vals = self._container[key.lower()]
+ for val in vals[1:]:
+ yield vals[0], val
+
+ def itermerged(self) -> typing.Iterator[tuple[str, str]]:
+ """Iterate over all headers, merging duplicate ones together."""
+ for key in self:
+ val = self._container[key.lower()]
+ yield val[0], ", ".join(val[1:])
+
+ def items(self) -> HTTPHeaderDictItemView: # type: ignore[override]
+ return HTTPHeaderDictItemView(self)
+
+ def _has_value_for_header(self, header_name: str, potential_value: str) -> bool:
+ if header_name in self:
+ return potential_value in self._container[header_name.lower()][1:]
+ return False
+
+ def __ior__(self, other: object) -> HTTPHeaderDict:
+ # Supports extending a header dict in-place using operator |=
+ # combining items with add instead of __setitem__
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
+ if maybe_constructable is None:
+ return NotImplemented
+ self.extend(maybe_constructable)
+ return self
+
+ def __or__(self, other: object) -> Self:
+ # Supports merging header dicts using operator |
+ # combining items with add instead of __setitem__
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
+ if maybe_constructable is None:
+ return NotImplemented
+ result = self.copy()
+ result.extend(maybe_constructable)
+ return result
+
+ def __ror__(self, other: object) -> Self:
+ # Supports merging header dicts using operator | when other is on left side
+ # combining items with add instead of __setitem__
+ maybe_constructable = ensure_can_construct_http_header_dict(other)
+ if maybe_constructable is None:
+ return NotImplemented
+ result = type(self)(maybe_constructable)
+ result.extend(self)
+ return result
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_request_methods.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_request_methods.py"
new file mode 100644
index 0000000..297c271
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_request_methods.py"
@@ -0,0 +1,278 @@
+from __future__ import annotations
+
+import json as _json
+import typing
+from urllib.parse import urlencode
+
+from ._base_connection import _TYPE_BODY
+from ._collections import HTTPHeaderDict
+from .filepost import _TYPE_FIELDS, encode_multipart_formdata
+from .response import BaseHTTPResponse
+
+__all__ = ["RequestMethods"]
+
+_TYPE_ENCODE_URL_FIELDS = typing.Union[
+ typing.Sequence[tuple[str, typing.Union[str, bytes]]],
+ typing.Mapping[str, typing.Union[str, bytes]],
+]
+
+
+class RequestMethods:
+ """
+ Convenience mixin for classes who implement a :meth:`urlopen` method, such
+ as :class:`urllib3.HTTPConnectionPool` and
+ :class:`urllib3.PoolManager`.
+
+ Provides behavior for making common types of HTTP request methods and
+ decides which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are
+ encoded in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+ encoded in the *body* of the request using multipart or www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+ :meth:`.request` is for making any kind of request, it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+
+ Initializer parameters:
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
+
+ def __init__(self, headers: typing.Mapping[str, str] | None = None) -> None:
+ self.headers = headers or {}
+
+ def urlopen(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ encode_multipart: bool = True,
+ multipart_boundary: str | None = None,
+ **kw: typing.Any,
+ ) -> BaseHTTPResponse: # Abstract
+ raise NotImplementedError(
+ "Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method."
+ )
+
+ def request(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ fields: _TYPE_FIELDS | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ json: typing.Any | None = None,
+ **urlopen_kw: typing.Any,
+ ) -> BaseHTTPResponse:
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the
+ option to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param url:
+ The URL to perform the request on.
+
+ :param body:
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
+
+ :param fields:
+ Data to encode and send in the URL or request body, depending on ``method``.
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param json:
+ Data to encode and send as JSON with UTF-encoded in the request body.
+ The ``"Content-Type"`` header will be set to ``"application/json"``
+ unless specified otherwise.
+ """
+ method = method.upper()
+
+ if json is not None and body is not None:
+ raise TypeError(
+ "request got values for both 'body' and 'json' parameters which are mutually exclusive"
+ )
+
+ if json is not None:
+ if headers is None:
+ headers = self.headers
+
+ if not ("content-type" in map(str.lower, headers.keys())):
+ headers = HTTPHeaderDict(headers)
+ headers["Content-Type"] = "application/json"
+
+ body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode(
+ "utf-8"
+ )
+
+ if body is not None:
+ urlopen_kw["body"] = body
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(
+ method,
+ url,
+ fields=fields, # type: ignore[arg-type]
+ headers=headers,
+ **urlopen_kw,
+ )
+ else:
+ return self.request_encode_body(
+ method, url, fields=fields, headers=headers, **urlopen_kw
+ )
+
+ def request_encode_url(
+ self,
+ method: str,
+ url: str,
+ fields: _TYPE_ENCODE_URL_FIELDS | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ **urlopen_kw: str,
+ ) -> BaseHTTPResponse:
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param url:
+ The URL to perform the request on.
+
+ :param fields:
+ Data to encode and send in the URL.
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw: dict[str, typing.Any] = {"headers": headers}
+ extra_kw.update(urlopen_kw)
+
+ if fields:
+ url += "?" + urlencode(fields)
+
+ return self.urlopen(method, url, **extra_kw)
+
+ def request_encode_body(
+ self,
+ method: str,
+ url: str,
+ fields: _TYPE_FIELDS | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ encode_multipart: bool = True,
+ multipart_boundary: str | None = None,
+ **urlopen_kw: str,
+ ) -> BaseHTTPResponse:
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :func:`urllib3.encode_multipart_formdata` is used to encode
+ the payload with the appropriate content type. Otherwise
+ :func:`urllib.parse.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+ Multipart encoding must be used when posting files, and it's reasonably
+ safe to use it in other times too. However, it may break request
+ signing, such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+ the MIME type is optional. For example::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
+ 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+ tuple) is optional but recommended to best mimic behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
+ be overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param url:
+ The URL to perform the request on.
+
+ :param fields:
+ Data to encode and send in the request body.
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param encode_multipart:
+ If True, encode the ``fields`` using the multipart/form-data MIME
+ format.
+
+ :param multipart_boundary:
+ If not specified, then a random boundary will be generated using
+ :func:`urllib3.filepost.choose_boundary`.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw: dict[str, typing.Any] = {"headers": HTTPHeaderDict(headers)}
+ body: bytes | str
+
+ if fields:
+ if "body" in urlopen_kw:
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one."
+ )
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(
+ fields, boundary=multipart_boundary
+ )
+ else:
+ body, content_type = (
+ urlencode(fields), # type: ignore[arg-type]
+ "application/x-www-form-urlencoded",
+ )
+
+ extra_kw["body"] = body
+ extra_kw["headers"].setdefault("Content-Type", content_type)
+
+ extra_kw.update(urlopen_kw)
+
+ return self.urlopen(method, url, **extra_kw)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_version.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_version.py"
new file mode 100644
index 0000000..af8c3f3
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/_version.py"
@@ -0,0 +1,34 @@
+# file generated by setuptools-scm
+# don't change, don't track in version control
+
+__all__ = [
+ "__version__",
+ "__version_tuple__",
+ "version",
+ "version_tuple",
+ "__commit_id__",
+ "commit_id",
+]
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from typing import Tuple
+ from typing import Union
+
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
+ COMMIT_ID = Union[str, None]
+else:
+ VERSION_TUPLE = object
+ COMMIT_ID = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
+
+__version__ = version = '2.6.2'
+__version_tuple__ = version_tuple = (2, 6, 2)
+
+__commit_id__ = commit_id = None
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/connection.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/connection.py"
new file mode 100644
index 0000000..2ceeb0a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/connection.py"
@@ -0,0 +1,1099 @@
+from __future__ import annotations
+
+import datetime
+import http.client
+import logging
+import os
+import re
+import socket
+import sys
+import threading
+import typing
+import warnings
+from http.client import HTTPConnection as _HTTPConnection
+from http.client import HTTPException as HTTPException # noqa: F401
+from http.client import ResponseNotReady
+from socket import timeout as SocketTimeout
+
+if typing.TYPE_CHECKING:
+ from .response import HTTPResponse
+ from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT
+ from .util.ssltransport import SSLTransport
+
+from ._collections import HTTPHeaderDict
+from .http2 import probe as http2_probe
+from .util.response import assert_header_parsing
+from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
+from .util.util import to_str
+from .util.wait import wait_for_read
+
+try: # Compiled with SSL?
+ import ssl
+
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError):
+ ssl = None # type: ignore[assignment]
+
+ class BaseSSLError(BaseException): # type: ignore[no-redef]
+ pass
+
+
+from ._base_connection import _TYPE_BODY
+from ._base_connection import ProxyConfig as ProxyConfig
+from ._base_connection import _ResponseOptions as _ResponseOptions
+from ._version import __version__
+from .exceptions import (
+ ConnectTimeoutError,
+ HeaderParsingError,
+ NameResolutionError,
+ NewConnectionError,
+ ProxyError,
+ SystemTimeWarning,
+)
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
+from .util.request import body_to_chunks
+from .util.ssl_ import assert_fingerprint as _assert_fingerprint
+from .util.ssl_ import (
+ create_urllib3_context,
+ is_ipaddress,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+from .util.ssl_match_hostname import CertificateError, match_hostname
+from .util.url import Url
+
+# Not a no-op, we're adding this to the namespace so it can be imported.
+ConnectionError = ConnectionError
+BrokenPipeError = BrokenPipeError
+
+
+log = logging.getLogger(__name__)
+
+port_by_scheme = {"http": 80, "https": 443}
+
+# When it comes time to update this value as a part of regular maintenance
+# (ie test_recent_date is failing) update it to ~6 months before the current date.
+RECENT_DATE = datetime.date(2025, 1, 1)
+
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+
+
+class HTTPConnection(_HTTPConnection):
+ """
+ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``source_address``: Set the source address for the current connection.
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass:
+
+ .. code-block:: python
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
+ default_port: typing.ClassVar[int] = port_by_scheme["http"] # type: ignore[misc]
+
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options: typing.ClassVar[connection._TYPE_SOCKET_OPTIONS] = [
+ (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ ]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified: bool = False
+
+ #: Whether this proxy connection verified the proxy host's certificate.
+ # If no proxy is currently connected to the value will be ``None``.
+ proxy_is_verified: bool | None = None
+
+ blocksize: int
+ source_address: tuple[str, int] | None
+ socket_options: connection._TYPE_SOCKET_OPTIONS | None
+
+ _has_connected_to_proxy: bool
+ _response_options: _ResponseOptions | None
+ _tunnel_host: str | None
+ _tunnel_port: int | None
+ _tunnel_scheme: str | None
+
+ def __init__(
+ self,
+ host: str,
+ port: int | None = None,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 16384,
+ socket_options: None | (
+ connection._TYPE_SOCKET_OPTIONS
+ ) = default_socket_options,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ ) -> None:
+ super().__init__(
+ host=host,
+ port=port,
+ timeout=Timeout.resolve_default_timeout(timeout),
+ source_address=source_address,
+ blocksize=blocksize,
+ )
+ self.socket_options = socket_options
+ self.proxy = proxy
+ self.proxy_config = proxy_config
+
+ self._has_connected_to_proxy = False
+ self._response_options = None
+ self._tunnel_host: str | None = None
+ self._tunnel_port: int | None = None
+ self._tunnel_scheme: str | None = None
+
+ def __str__(self) -> str:
+ return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})"
+
+ def __repr__(self) -> str:
+ return f"<{self} at {id(self):#x}>"
+
+ @property
+ def host(self) -> str:
+ """
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+
+ In general, SSL certificates don't include the trailing dot indicating a
+ fully-qualified domain name, and thus, they don't validate properly when
+ checked against a domain name that includes the dot. In addition, some
+ servers may not expect to receive the trailing dot when provided.
+
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
+ lookup with the trailing dot will properly only resolve the appropriate FQDN,
+ whereas a lookup without a trailing dot will search the system's search domain
+ list. Thus, it's important to keep the original host around for use only in
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
+ actual TCP connection across which we're going to send HTTP requests).
+ """
+ return self._dns_host.rstrip(".")
+
+ @host.setter
+ def host(self, value: str) -> None:
+ """
+ Setter for the `host` property.
+
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
+ only uses `host`, and it seems reasonable that other libraries follow suit.
+ """
+ self._dns_host = value
+
+ def _new_conn(self) -> socket.socket:
+ """Establish a socket connection and set nodelay settings on it.
+
+ :return: New socket connection.
+ """
+ try:
+ sock = connection.create_connection(
+ (self._dns_host, self.port),
+ self.timeout,
+ source_address=self.source_address,
+ socket_options=self.socket_options,
+ )
+ except socket.gaierror as e:
+ raise NameResolutionError(self.host, self, e) from e
+ except SocketTimeout as e:
+ raise ConnectTimeoutError(
+ self,
+ f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
+ ) from e
+
+ except OSError as e:
+ raise NewConnectionError(
+ self, f"Failed to establish a new connection: {e}"
+ ) from e
+
+ sys.audit("http.client.connect", self, self.host, self.port)
+
+ return sock
+
+ def set_tunnel(
+ self,
+ host: str,
+ port: int | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ scheme: str = "http",
+ ) -> None:
+ if scheme not in ("http", "https"):
+ raise ValueError(
+ f"Invalid proxy scheme for tunneling: {scheme!r}, must be either 'http' or 'https'"
+ )
+ super().set_tunnel(host, port=port, headers=headers)
+ self._tunnel_scheme = scheme
+
+ if sys.version_info < (3, 11, 9) or ((3, 12) <= sys.version_info < (3, 12, 3)):
+ # Taken from python/cpython#100986 which was backported in 3.11.9 and 3.12.3.
+ # When using connection_from_host, host will come without brackets.
+ def _wrap_ipv6(self, ip: bytes) -> bytes:
+ if b":" in ip and ip[0] != b"["[0]:
+ return b"[" + ip + b"]"
+ return ip
+
+ if sys.version_info < (3, 11, 9):
+ # `_tunnel` copied from 3.11.13 backporting
+ # https://github.com/python/cpython/commit/0d4026432591d43185568dd31cef6a034c4b9261
+ # and https://github.com/python/cpython/commit/6fbc61070fda2ffb8889e77e3b24bca4249ab4d1
+ def _tunnel(self) -> None:
+ _MAXLINE = http.client._MAXLINE # type: ignore[attr-defined]
+ connect = b"CONNECT %s:%d HTTP/1.0\r\n" % ( # type: ignore[str-format]
+ self._wrap_ipv6(self._tunnel_host.encode("ascii")), # type: ignore[union-attr]
+ self._tunnel_port,
+ )
+ headers = [connect]
+ for header, value in self._tunnel_headers.items(): # type: ignore[attr-defined]
+ headers.append(f"{header}: {value}\r\n".encode("latin-1"))
+ headers.append(b"\r\n")
+ # Making a single send() call instead of one per line encourages
+ # the host OS to use a more optimal packet size instead of
+ # potentially emitting a series of small packets.
+ self.send(b"".join(headers))
+ del headers
+
+ response = self.response_class(self.sock, method=self._method) # type: ignore[attr-defined]
+ try:
+ (version, code, message) = response._read_status() # type: ignore[attr-defined]
+
+ if code != http.HTTPStatus.OK:
+ self.close()
+ raise OSError(
+ f"Tunnel connection failed: {code} {message.strip()}"
+ )
+ while True:
+ line = response.fp.readline(_MAXLINE + 1)
+ if len(line) > _MAXLINE:
+ raise http.client.LineTooLong("header line")
+ if not line:
+ # for sites which EOF without sending a trailer
+ break
+ if line in (b"\r\n", b"\n", b""):
+ break
+
+ if self.debuglevel > 0:
+ print("header:", line.decode())
+ finally:
+ response.close()
+
+ elif (3, 12) <= sys.version_info < (3, 12, 3):
+ # `_tunnel` copied from 3.12.11 backporting
+ # https://github.com/python/cpython/commit/23aef575c7629abcd4aaf028ebd226fb41a4b3c8
+ def _tunnel(self) -> None: # noqa: F811
+ connect = b"CONNECT %s:%d HTTP/1.1\r\n" % ( # type: ignore[str-format]
+ self._wrap_ipv6(self._tunnel_host.encode("idna")), # type: ignore[union-attr]
+ self._tunnel_port,
+ )
+ headers = [connect]
+ for header, value in self._tunnel_headers.items(): # type: ignore[attr-defined]
+ headers.append(f"{header}: {value}\r\n".encode("latin-1"))
+ headers.append(b"\r\n")
+ # Making a single send() call instead of one per line encourages
+ # the host OS to use a more optimal packet size instead of
+ # potentially emitting a series of small packets.
+ self.send(b"".join(headers))
+ del headers
+
+ response = self.response_class(self.sock, method=self._method) # type: ignore[attr-defined]
+ try:
+ (version, code, message) = response._read_status() # type: ignore[attr-defined]
+
+ self._raw_proxy_headers = http.client._read_headers(response.fp) # type: ignore[attr-defined]
+
+ if self.debuglevel > 0:
+ for header in self._raw_proxy_headers:
+ print("header:", header.decode())
+
+ if code != http.HTTPStatus.OK:
+ self.close()
+ raise OSError(
+ f"Tunnel connection failed: {code} {message.strip()}"
+ )
+
+ finally:
+ response.close()
+
+ def connect(self) -> None:
+ self.sock = self._new_conn()
+ if self._tunnel_host:
+ # If we're tunneling it means we're connected to our proxy.
+ self._has_connected_to_proxy = True
+
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
+ self._tunnel()
+
+ # If there's a proxy to be connected to we are fully connected.
+ # This is set twice (once above and here) due to forwarding proxies
+ # not using tunnelling.
+ self._has_connected_to_proxy = bool(self.proxy)
+
+ if self._has_connected_to_proxy:
+ self.proxy_is_verified = False
+
+ @property
+ def is_closed(self) -> bool:
+ return self.sock is None
+
+ @property
+ def is_connected(self) -> bool:
+ if self.sock is None:
+ return False
+ return not wait_for_read(self.sock, timeout=0.0)
+
+ @property
+ def has_connected_to_proxy(self) -> bool:
+ return self._has_connected_to_proxy
+
+ @property
+ def proxy_is_forwarding(self) -> bool:
+ """
+ Return True if a forwarding proxy is configured, else return False
+ """
+ return bool(self.proxy) and self._tunnel_host is None
+
+ @property
+ def proxy_is_tunneling(self) -> bool:
+ """
+ Return True if a tunneling proxy is configured, else return False
+ """
+ return self._tunnel_host is not None
+
+ def close(self) -> None:
+ try:
+ super().close()
+ finally:
+ # Reset all stateful properties so connection
+ # can be re-used without leaking prior configs.
+ self.sock = None
+ self.is_verified = False
+ self.proxy_is_verified = None
+ self._has_connected_to_proxy = False
+ self._response_options = None
+ self._tunnel_host = None
+ self._tunnel_port = None
+ self._tunnel_scheme = None
+
+ def putrequest(
+ self,
+ method: str,
+ url: str,
+ skip_host: bool = False,
+ skip_accept_encoding: bool = False,
+ ) -> None:
+ """"""
+ # Empty docstring because the indentation of CPython's implementation
+ # is broken but we don't want this method in our documentation.
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+ if match:
+ raise ValueError(
+ f"Method cannot contain non-token characters {method!r} (found at least {match.group()!r})"
+ )
+
+ return super().putrequest(
+ method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding
+ )
+
+ def putheader(self, header: str, *values: str) -> None: # type: ignore[override]
+ """"""
+ if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
+ super().putheader(header, *values)
+ elif to_str(header.lower()) not in SKIPPABLE_HEADERS:
+ skippable_headers = "', '".join(
+ [str.title(header) for header in sorted(SKIPPABLE_HEADERS)]
+ )
+ raise ValueError(
+ f"urllib3.util.SKIP_HEADER only supports '{skippable_headers}'"
+ )
+
+ # `request` method's signature intentionally violates LSP.
+ # urllib3's API is different from `http.client.HTTPConnection` and the subclassing is only incidental.
+ def request( # type: ignore[override]
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ *,
+ chunked: bool = False,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ ) -> None:
+ # Update the inner socket's timeout value to send the request.
+ # This only triggers if the connection is re-used.
+ if self.sock is not None:
+ self.sock.settimeout(self.timeout)
+
+ # Store these values to be fed into the HTTPResponse
+ # object later. TODO: Remove this in favor of a real
+ # HTTP lifecycle mechanism.
+
+ # We have to store these before we call .request()
+ # because sometimes we can still salvage a response
+ # off the wire even if we aren't able to completely
+ # send the request body.
+ self._response_options = _ResponseOptions(
+ request_method=method,
+ request_url=url,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ enforce_content_length=enforce_content_length,
+ )
+
+ if headers is None:
+ headers = {}
+ header_keys = frozenset(to_str(k.lower()) for k in headers)
+ skip_accept_encoding = "accept-encoding" in header_keys
+ skip_host = "host" in header_keys
+ self.putrequest(
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+ )
+
+ # Transform the body into an iterable of sendall()-able chunks
+ # and detect if an explicit Content-Length is doable.
+ chunks_and_cl = body_to_chunks(body, method=method, blocksize=self.blocksize)
+ chunks = chunks_and_cl.chunks
+ content_length = chunks_and_cl.content_length
+
+ # When chunked is explicit set to 'True' we respect that.
+ if chunked:
+ if "transfer-encoding" not in header_keys:
+ self.putheader("Transfer-Encoding", "chunked")
+ else:
+ # Detect whether a framing mechanism is already in use. If so
+ # we respect that value, otherwise we pick chunked vs content-length
+ # depending on the type of 'body'.
+ if "content-length" in header_keys:
+ chunked = False
+ elif "transfer-encoding" in header_keys:
+ chunked = True
+
+ # Otherwise we go off the recommendation of 'body_to_chunks()'.
+ else:
+ chunked = False
+ if content_length is None:
+ if chunks is not None:
+ chunked = True
+ self.putheader("Transfer-Encoding", "chunked")
+ else:
+ self.putheader("Content-Length", str(content_length))
+
+ # Now that framing headers are out of the way we send all the other headers.
+ if "user-agent" not in header_keys:
+ self.putheader("User-Agent", _get_default_user_agent())
+ for header, value in headers.items():
+ self.putheader(header, value)
+ self.endheaders()
+
+ # If we're given a body we start sending that in chunks.
+ if chunks is not None:
+ for chunk in chunks:
+ # Sending empty chunks isn't allowed for TE: chunked
+ # as it indicates the end of the body.
+ if not chunk:
+ continue
+ if isinstance(chunk, str):
+ chunk = chunk.encode("utf-8")
+ if chunked:
+ self.send(b"%x\r\n%b\r\n" % (len(chunk), chunk))
+ else:
+ self.send(chunk)
+
+ # Regardless of whether we have a body or not, if we're in
+ # chunked mode we want to send an explicit empty chunk.
+ if chunked:
+ self.send(b"0\r\n\r\n")
+
+ def request_chunked(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ ) -> None:
+ """
+ Alternative to the common request method, which sends the
+ body with chunked encoding and not as one block
+ """
+ warnings.warn(
+ "HTTPConnection.request_chunked() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead use HTTPConnection.request(..., chunked=True).",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+ self.request(method, url, body=body, headers=headers, chunked=True)
+
+ def getresponse( # type: ignore[override]
+ self,
+ ) -> HTTPResponse:
+ """
+ Get the response from the server.
+
+ If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable.
+
+ If a request has not been sent or if a previous response has not be handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed.
+ """
+ # Raise the same error as http.client.HTTPConnection
+ if self._response_options is None:
+ raise ResponseNotReady()
+
+ # Reset this attribute for being used again.
+ resp_options = self._response_options
+ self._response_options = None
+
+ # Since the connection's timeout value may have been updated
+ # we need to set the timeout on the socket.
+ self.sock.settimeout(self.timeout)
+
+ # This is needed here to avoid circular import errors
+ from .response import HTTPResponse
+
+ # Save a reference to the shutdown function before ownership is passed
+ # to httplib_response
+ # TODO should we implement it everywhere?
+ _shutdown = getattr(self.sock, "shutdown", None)
+
+ # Get the response from http.client.HTTPConnection
+ httplib_response = super().getresponse()
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except (HeaderParsingError, TypeError) as hpe:
+ log.warning(
+ "Failed to parse headers (url=%s): %s",
+ _url_from_connection(self, resp_options.request_url),
+ hpe,
+ exc_info=True,
+ )
+
+ headers = HTTPHeaderDict(httplib_response.msg.items())
+
+ response = HTTPResponse(
+ body=httplib_response,
+ headers=headers,
+ status=httplib_response.status,
+ version=httplib_response.version,
+ version_string=getattr(self, "_http_vsn_str", "HTTP/?"),
+ reason=httplib_response.reason,
+ preload_content=resp_options.preload_content,
+ decode_content=resp_options.decode_content,
+ original_response=httplib_response,
+ enforce_content_length=resp_options.enforce_content_length,
+ request_method=resp_options.request_method,
+ request_url=resp_options.request_url,
+ sock_shutdown=_shutdown,
+ )
+ return response
+
+
+class HTTPSConnection(HTTPConnection):
+ """
+ Many of the parameters to this constructor are passed to the underlying SSL
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
+ """
+
+ default_port = port_by_scheme["https"] # type: ignore[misc]
+
+ cert_reqs: int | str | None = None
+ ca_certs: str | None = None
+ ca_cert_dir: str | None = None
+ ca_cert_data: None | str | bytes = None
+ ssl_version: int | str | None = None
+ ssl_minimum_version: int | None = None
+ ssl_maximum_version: int | None = None
+ assert_fingerprint: str | None = None
+ _connect_callback: typing.Callable[..., None] | None = None
+
+ def __init__(
+ self,
+ host: str,
+ port: int | None = None,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 16384,
+ socket_options: None | (
+ connection._TYPE_SOCKET_OPTIONS
+ ) = HTTPConnection.default_socket_options,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ cert_reqs: int | str | None = None,
+ assert_hostname: None | str | typing.Literal[False] = None,
+ assert_fingerprint: str | None = None,
+ server_hostname: str | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ ca_certs: str | None = None,
+ ca_cert_dir: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ ssl_minimum_version: int | None = None,
+ ssl_maximum_version: int | None = None,
+ ssl_version: int | str | None = None, # Deprecated
+ cert_file: str | None = None,
+ key_file: str | None = None,
+ key_password: str | None = None,
+ ) -> None:
+ super().__init__(
+ host,
+ port=port,
+ timeout=timeout,
+ source_address=source_address,
+ blocksize=blocksize,
+ socket_options=socket_options,
+ proxy=proxy,
+ proxy_config=proxy_config,
+ )
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.key_password = key_password
+ self.ssl_context = ssl_context
+ self.server_hostname = server_hostname
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ssl_version = ssl_version
+ self.ssl_minimum_version = ssl_minimum_version
+ self.ssl_maximum_version = ssl_maximum_version
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+ self.ca_cert_data = ca_cert_data
+
+ # cert_reqs depends on ssl_context so calculate last.
+ if cert_reqs is None:
+ if self.ssl_context is not None:
+ cert_reqs = self.ssl_context.verify_mode
+ else:
+ cert_reqs = resolve_cert_reqs(None)
+ self.cert_reqs = cert_reqs
+ self._connect_callback = None
+
+ def set_cert(
+ self,
+ key_file: str | None = None,
+ cert_file: str | None = None,
+ cert_reqs: int | str | None = None,
+ key_password: str | None = None,
+ ca_certs: str | None = None,
+ assert_hostname: None | str | typing.Literal[False] = None,
+ assert_fingerprint: str | None = None,
+ ca_cert_dir: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ ) -> None:
+ """
+ This method should only be called once, before the connection is used.
+ """
+ warnings.warn(
+ "HTTPSConnection.set_cert() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead provide the parameters to the "
+ "HTTPSConnection constructor.",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
+ # have an SSLContext object in which case we'll use its verify_mode.
+ if cert_reqs is None:
+ if self.ssl_context is not None:
+ cert_reqs = self.ssl_context.verify_mode
+ else:
+ cert_reqs = resolve_cert_reqs(None)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+ self.ca_cert_data = ca_cert_data
+
+ def connect(self) -> None:
+ # Today we don't need to be doing this step before the /actual/ socket
+ # connection, however in the future we'll need to decide whether to
+ # create a new socket or re-use an existing "shared" socket as a part
+ # of the HTTP/2 handshake dance.
+ if self._tunnel_host is not None and self._tunnel_port is not None:
+ probe_http2_host = self._tunnel_host
+ probe_http2_port = self._tunnel_port
+ else:
+ probe_http2_host = self.host
+ probe_http2_port = self.port
+
+ # Check if the target origin supports HTTP/2.
+ # If the value comes back as 'None' it means that the current thread
+ # is probing for HTTP/2 support. Otherwise, we're waiting for another
+ # probe to complete, or we get a value right away.
+ target_supports_http2: bool | None
+ if "h2" in ssl_.ALPN_PROTOCOLS:
+ target_supports_http2 = http2_probe.acquire_and_get(
+ host=probe_http2_host, port=probe_http2_port
+ )
+ else:
+ # If HTTP/2 isn't going to be offered it doesn't matter if
+ # the target supports HTTP/2. Don't want to make a probe.
+ target_supports_http2 = False
+
+ if self._connect_callback is not None:
+ self._connect_callback(
+ "before connect",
+ thread_id=threading.get_ident(),
+ target_supports_http2=target_supports_http2,
+ )
+
+ try:
+ sock: socket.socket | ssl.SSLSocket
+ self.sock = sock = self._new_conn()
+ server_hostname: str = self.host
+ tls_in_tls = False
+
+ # Do we need to establish a tunnel?
+ if self.proxy_is_tunneling:
+ # We're tunneling to an HTTPS origin so need to do TLS-in-TLS.
+ if self._tunnel_scheme == "https":
+ # _connect_tls_proxy will verify and assign proxy_is_verified
+ self.sock = sock = self._connect_tls_proxy(self.host, sock)
+ tls_in_tls = True
+ elif self._tunnel_scheme == "http":
+ self.proxy_is_verified = False
+
+ # If we're tunneling it means we're connected to our proxy.
+ self._has_connected_to_proxy = True
+
+ self._tunnel()
+ # Override the host with the one we're requesting data from.
+ server_hostname = typing.cast(str, self._tunnel_host)
+
+ if self.server_hostname is not None:
+ server_hostname = self.server_hostname
+
+ is_time_off = datetime.date.today() < RECENT_DATE
+ if is_time_off:
+ warnings.warn(
+ (
+ f"System time is way off (before {RECENT_DATE}). This will probably "
+ "lead to SSL verification errors"
+ ),
+ SystemTimeWarning,
+ )
+
+ # Remove trailing '.' from fqdn hostnames to allow certificate validation
+ server_hostname_rm_dot = server_hostname.rstrip(".")
+
+ sock_and_verified = _ssl_wrap_socket_and_match_hostname(
+ sock=sock,
+ cert_reqs=self.cert_reqs,
+ ssl_version=self.ssl_version,
+ ssl_minimum_version=self.ssl_minimum_version,
+ ssl_maximum_version=self.ssl_maximum_version,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ ca_cert_data=self.ca_cert_data,
+ cert_file=self.cert_file,
+ key_file=self.key_file,
+ key_password=self.key_password,
+ server_hostname=server_hostname_rm_dot,
+ ssl_context=self.ssl_context,
+ tls_in_tls=tls_in_tls,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint,
+ )
+ self.sock = sock_and_verified.socket
+
+ # If an error occurs during connection/handshake we may need to release
+ # our lock so another connection can probe the origin.
+ except BaseException:
+ if self._connect_callback is not None:
+ self._connect_callback(
+ "after connect failure",
+ thread_id=threading.get_ident(),
+ target_supports_http2=target_supports_http2,
+ )
+
+ if target_supports_http2 is None:
+ http2_probe.set_and_release(
+ host=probe_http2_host, port=probe_http2_port, supports_http2=None
+ )
+ raise
+
+ # If this connection doesn't know if the origin supports HTTP/2
+ # we report back to the HTTP/2 probe our result.
+ if target_supports_http2 is None:
+ supports_http2 = sock_and_verified.socket.selected_alpn_protocol() == "h2"
+ http2_probe.set_and_release(
+ host=probe_http2_host,
+ port=probe_http2_port,
+ supports_http2=supports_http2,
+ )
+
+ # Forwarding proxies can never have a verified target since
+ # the proxy is the one doing the verification. Should instead
+ # use a CONNECT tunnel in order to verify the target.
+ # See: https://github.com/urllib3/urllib3/issues/3267.
+ if self.proxy_is_forwarding:
+ self.is_verified = False
+ else:
+ self.is_verified = sock_and_verified.is_verified
+
+ # If there's a proxy to be connected to we are fully connected.
+ # This is set twice (once above and here) due to forwarding proxies
+ # not using tunnelling.
+ self._has_connected_to_proxy = bool(self.proxy)
+
+ # Set `self.proxy_is_verified` unless it's already set while
+ # establishing a tunnel.
+ if self._has_connected_to_proxy and self.proxy_is_verified is None:
+ self.proxy_is_verified = sock_and_verified.is_verified
+
+ def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket:
+ """
+ Establish a TLS connection to the proxy using the provided SSL context.
+ """
+ # `_connect_tls_proxy` is called when self._tunnel_host is truthy.
+ proxy_config = typing.cast(ProxyConfig, self.proxy_config)
+ ssl_context = proxy_config.ssl_context
+ sock_and_verified = _ssl_wrap_socket_and_match_hostname(
+ sock,
+ cert_reqs=self.cert_reqs,
+ ssl_version=self.ssl_version,
+ ssl_minimum_version=self.ssl_minimum_version,
+ ssl_maximum_version=self.ssl_maximum_version,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ ca_cert_data=self.ca_cert_data,
+ server_hostname=hostname,
+ ssl_context=ssl_context,
+ assert_hostname=proxy_config.assert_hostname,
+ assert_fingerprint=proxy_config.assert_fingerprint,
+ # Features that aren't implemented for proxies yet:
+ cert_file=None,
+ key_file=None,
+ key_password=None,
+ tls_in_tls=False,
+ )
+ self.proxy_is_verified = sock_and_verified.is_verified
+ return sock_and_verified.socket # type: ignore[return-value]
+
+
+class _WrappedAndVerifiedSocket(typing.NamedTuple):
+ """
+ Wrapped socket and whether the connection is
+ verified after the TLS handshake
+ """
+
+ socket: ssl.SSLSocket | SSLTransport
+ is_verified: bool
+
+
+def _ssl_wrap_socket_and_match_hostname(
+ sock: socket.socket,
+ *,
+ cert_reqs: None | str | int,
+ ssl_version: None | str | int,
+ ssl_minimum_version: int | None,
+ ssl_maximum_version: int | None,
+ cert_file: str | None,
+ key_file: str | None,
+ key_password: str | None,
+ ca_certs: str | None,
+ ca_cert_dir: str | None,
+ ca_cert_data: None | str | bytes,
+ assert_hostname: None | str | typing.Literal[False],
+ assert_fingerprint: str | None,
+ server_hostname: str | None,
+ ssl_context: ssl.SSLContext | None,
+ tls_in_tls: bool = False,
+) -> _WrappedAndVerifiedSocket:
+ """Logic for constructing an SSLContext from all TLS parameters, passing
+ that down into ssl_wrap_socket, and then doing certificate verification
+ either via hostname or fingerprint. This function exists to guarantee
+ that both proxies and targets have the same behavior when connecting via TLS.
+ """
+ default_ssl_context = False
+ if ssl_context is None:
+ default_ssl_context = True
+ context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(ssl_version),
+ ssl_minimum_version=ssl_minimum_version,
+ ssl_maximum_version=ssl_maximum_version,
+ cert_reqs=resolve_cert_reqs(cert_reqs),
+ )
+ else:
+ context = ssl_context
+
+ context.verify_mode = resolve_cert_reqs(cert_reqs)
+
+ # In some cases, we want to verify hostnames ourselves
+ if (
+ # `ssl` can't verify fingerprints or alternate hostnames
+ assert_fingerprint
+ or assert_hostname
+ # assert_hostname can be set to False to disable hostname checking
+ or assert_hostname is False
+ # We still support OpenSSL 1.0.2, which prevents us from verifying
+ # hostnames easily: https://github.com/pyca/pyopenssl/pull/933
+ or ssl_.IS_PYOPENSSL
+ or not ssl_.HAS_NEVER_CHECK_COMMON_NAME
+ ):
+ context.check_hostname = False
+
+ # Try to load OS default certs if none are given. We need to do the hasattr() check
+ # for custom pyOpenSSL SSLContext objects because they don't support
+ # load_default_certs().
+ if (
+ not ca_certs
+ and not ca_cert_dir
+ and not ca_cert_data
+ and default_ssl_context
+ and hasattr(context, "load_default_certs")
+ ):
+ context.load_default_certs()
+
+ # Ensure that IPv6 addresses are in the proper format and don't have a
+ # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses
+ # and interprets them as DNS hostnames.
+ if server_hostname is not None:
+ normalized = server_hostname.strip("[]")
+ if "%" in normalized:
+ normalized = normalized[: normalized.rfind("%")]
+ if is_ipaddress(normalized):
+ server_hostname = normalized
+
+ ssl_sock = ssl_wrap_socket(
+ sock=sock,
+ keyfile=key_file,
+ certfile=cert_file,
+ key_password=key_password,
+ ca_certs=ca_certs,
+ ca_cert_dir=ca_cert_dir,
+ ca_cert_data=ca_cert_data,
+ server_hostname=server_hostname,
+ ssl_context=context,
+ tls_in_tls=tls_in_tls,
+ )
+
+ try:
+ if assert_fingerprint:
+ _assert_fingerprint(
+ ssl_sock.getpeercert(binary_form=True), assert_fingerprint
+ )
+ elif (
+ context.verify_mode != ssl.CERT_NONE
+ and not context.check_hostname
+ and assert_hostname is not False
+ ):
+ cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment]
+
+ # Need to signal to our match_hostname whether to use 'commonName' or not.
+ # If we're using our own constructed SSLContext we explicitly set 'False'
+ # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name.
+ if default_ssl_context:
+ hostname_checks_common_name = False
+ else:
+ hostname_checks_common_name = (
+ getattr(context, "hostname_checks_common_name", False) or False
+ )
+
+ _match_hostname(
+ cert,
+ assert_hostname or server_hostname, # type: ignore[arg-type]
+ hostname_checks_common_name,
+ )
+
+ return _WrappedAndVerifiedSocket(
+ socket=ssl_sock,
+ is_verified=context.verify_mode == ssl.CERT_REQUIRED
+ or bool(assert_fingerprint),
+ )
+ except BaseException:
+ ssl_sock.close()
+ raise
+
+
def _match_hostname(
    cert: _TYPE_PEER_CERT_RET_DICT | None,
    asserted_hostname: str,
    hostname_checks_common_name: bool = False,
) -> None:
    """Verify *cert* against *asserted_hostname*, attaching the peer
    certificate to the raised ``CertificateError`` for caller inspection.
    """
    # Square brackets only wrap IPv6 literals.  Strip them solely when the
    # remainder is an IP address, so DNS names keep their exact form (this
    # mirrors how ssl.match_hostname() normalizes only IP addresses).
    unbracketed = asserted_hostname.strip("[]")
    if is_ipaddress(unbracketed):
        asserted_hostname = unbracketed

    try:
        match_hostname(cert, asserted_hostname, hostname_checks_common_name)
    except CertificateError as exc:
        log.warning(
            "Certificate did not match expected hostname: %s. Certificate: %s",
            asserted_hostname,
            cert,
        )
        # Expose the certificate on the exception so client code catching it
        # can inspect what the peer actually presented.
        exc._peer_cert = cert  # type: ignore[attr-defined]
        raise
+
+
def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError:
    """Wrap *err* in a :class:`ProxyError`, appending a configuration hint
    when the failure looks like TLS being spoken to an HTTP-only proxy.
    """
    # Normalize the error text (lowercase, punctuation collapsed to spaces)
    # so the phrase matching below is robust.
    normalized = " ".join(re.split("[^a-z]", str(err).lower()))
    telltale_phrases = (
        "wrong version number",
        "unknown protocol",
        "record layer failure",
    )
    looks_like_http_proxy = any(p in normalized for p in telltale_phrases)

    message = "Unable to connect to proxy"
    if looks_like_http_proxy and proxy_scheme == "https":
        # Very likely an HTTPS proxy URL pointing at an HTTP-only proxy.
        message += (
            ". Your proxy appears to only use HTTP and not HTTPS, "
            "try changing your proxy URL to be HTTP. See: "
            "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
            "#https-proxy-error-http-proxy"
        )

    wrapped = ProxyError(message, err)
    wrapped.__cause__ = err
    return wrapped
+
+
def _get_default_user_agent() -> str:
    """Return the default ``User-Agent`` value sent by this urllib3 build."""
    return "python-urllib3/" + __version__
+
+
class DummyConnection:
    """Used to detect a failed ConnectionCls import.

    When the :mod:`ssl` module is unavailable this class is assigned to
    ``HTTPSConnection`` below, so later code can detect missing TLS support.
    """
+
+
# Without a working ssl module there is no TLS support; substitute the
# sentinel class so attempts to use HTTPS fail in a detectable way.
if not ssl:
    HTTPSConnection = DummyConnection  # type: ignore[misc, assignment] # noqa: F811


# Legacy name kept as an alias for HTTPSConnection.
VerifiedHTTPSConnection = HTTPSConnection
+
+
def _url_from_connection(
    conn: HTTPConnection | HTTPSConnection, path: str | None = None
) -> str:
    """Return the URL a connection points at; mainly for testing and logging."""
    if isinstance(conn, HTTPSConnection):
        scheme = "https"
    else:
        scheme = "http"
    return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/connectionpool.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/connectionpool.py"
new file mode 100644
index 0000000..3a0685b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/connectionpool.py"
@@ -0,0 +1,1178 @@
+from __future__ import annotations
+
+import errno
+import logging
+import queue
+import sys
+import typing
+import warnings
+import weakref
+from socket import timeout as SocketTimeout
+from types import TracebackType
+
+from ._base_connection import _TYPE_BODY
+from ._collections import HTTPHeaderDict
+from ._request_methods import RequestMethods
+from .connection import (
+ BaseSSLError,
+ BrokenPipeError,
+ DummyConnection,
+ HTTPConnection,
+ HTTPException,
+ HTTPSConnection,
+ ProxyConfig,
+ _wrap_proxy_error,
+)
+from .connection import port_by_scheme as port_by_scheme
+from .exceptions import (
+ ClosedPoolError,
+ EmptyPoolError,
+ FullPoolError,
+ HostChangedError,
+ InsecureRequestWarning,
+ LocationValueError,
+ MaxRetryError,
+ NewConnectionError,
+ ProtocolError,
+ ProxyError,
+ ReadTimeoutError,
+ SSLError,
+ TimeoutError,
+)
+from .response import BaseHTTPResponse
+from .util.connection import is_connection_dropped
+from .util.proxy import connection_requires_http_tunnel
+from .util.request import _TYPE_BODY_POSITION, set_file_position
+from .util.retry import Retry
+from .util.ssl_match_hostname import CertificateError
+from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
+from .util.url import Url, _encode_target
+from .util.url import _normalize_host as normalize_host
+from .util.url import parse_url
+from .util.util import to_str
+
+if typing.TYPE_CHECKING:
+ import ssl
+
+ from typing_extensions import Self
+
+ from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection
+
log = logging.getLogger(__name__)

# A timeout may be supplied as a Timeout object, a bare number of seconds,
# the _DEFAULT_TIMEOUT sentinel (typed _TYPE_DEFAULT), or None.
_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]
+
+
+# Pool objects
class ConnectionPool:
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
        ConnectionPool.urlopen() does not normalize or percent-encode target URIs
        which is useful if your target server doesn't support percent-encoded
        target URIs.
    """

    # Subclasses set this to "http"/"https"; it drives host normalization.
    scheme: str | None = None
    # LIFO queue: the most recently returned connection is handed out first.
    QueueCls = queue.LifoQueue

    def __init__(self, host: str, port: int | None = None) -> None:
        if not host:
            raise LocationValueError("No host specified.")

        self.host = _normalize_host(host, scheme=self.scheme)
        self.port = port

        # Unlike self.host above, this uses 'normalize_host()' (not
        # '_normalize_host()') so IPv6 addresses keep their square braces.
        # The value is passed to HTTPConnection.set_tunnel(), and HTTP
        # CONNECT tunneling requires the bracketed form.
        self._tunnel_host = normalize_host(host, scheme=self.scheme).lower()

    def __str__(self) -> str:
        return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> typing.Literal[False]:
        self.close()
        # Returning False re-raises whatever exception is in flight.
        return False

    def close(self) -> None:
        """
        Close all pooled connections and disable the pool.
        """
+
+
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# errno values meaning "operation would block" rather than a real failure;
# _raise_timeout() treats these as read timeouts.
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`http.client.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`http.client.HTTPConnection`.
+
+ :param timeout:
+ Socket timeout in seconds for each individual connection. This can
+ be a float or integer, which sets the timeout for the HTTP request,
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
+ fine-grained control over request timeouts. After the constructor has
+ been parsed, this is always a `urllib3.util.Timeout` object.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to False, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param retries:
+ Retry configuration to use by default with requests in this pool.
+
+ :param _proxy:
+ Parsed proxy URL, should not be used directly, instead, see
+ :class:`urllib3.ProxyManager`
+
+ :param _proxy_headers:
+ A dictionary with proxy headers, should not be used directly,
+ instead, see :class:`urllib3.ProxyManager`
+
+ :param \\**conn_kw:
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+ :class:`urllib3.connection.HTTPSConnection` instances.
+ """
+
+ scheme = "http"
+ ConnectionCls: type[BaseHTTPConnection] | type[BaseHTTPSConnection] = HTTPConnection
+
    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        _proxy_config: ProxyConfig | None = None,
        **conn_kw: typing.Any,
    ):
        """Create the pool; see the class docstring for parameter details."""
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        # Normalize the user-supplied timeout so the rest of the pool only
        # ever deals with a Timeout instance.
        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}
        self.proxy_config = _proxy_config

        # Fill the queue up so that doing get() on it will block properly
        # (each None placeholder is swapped for a real connection on demand).
        for _ in range(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault("socket_options", [])

            self.conn_kw["proxy"] = self.proxy
            self.conn_kw["proxy_config"] = self.proxy_config

        # Do not pass 'self' as callback to 'finalize'.
        # Then the 'finalize' would keep an endless living (leak) to self.
        # By just passing a reference to the pool allows the garbage collector
        # to free self if nobody else has a reference to it.
        pool = self.pool

        # Close all the HTTPConnections in the pool before the
        # HTTPConnectionPool object is garbage collected.
        weakref.finalize(self, _close_pool_connections, pool)
+
+ def _new_conn(self) -> BaseHTTPConnection:
+ """
+ Return a fresh :class:`HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.debug(
+ "Starting new HTTP connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "80",
+ )
+
+ conn = self.ConnectionCls(
+ host=self.host,
+ port=self.port,
+ timeout=self.timeout.connect_timeout,
+ **self.conn_kw,
+ )
+ return conn
+
+ def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection:
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+
+ if self.pool is None:
+ raise ClosedPoolError(self, "Pool is closed.")
+
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ except AttributeError: # self.pool is None
+ raise ClosedPoolError(self, "Pool is closed.") from None # Defensive:
+
+ except queue.Empty:
+ if self.block:
+ raise EmptyPoolError(
+ self,
+ "Pool is empty and a new connection can't be opened due to blocking mode.",
+ ) from None
+ pass # Oh well, we'll create a new connection then
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and is_connection_dropped(conn):
+ log.debug("Resetting dropped connection: %s", self.host)
+ conn.close()
+
+ return conn or self._new_conn()
+
    def _put_conn(self, conn: BaseHTTPConnection | None) -> None:
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        if self.pool is not None:
            try:
                self.pool.put(conn, block=False)
                return  # Everything is dandy, done.
            except AttributeError:
                # self.pool is None.
                pass
            except queue.Full:
                # Connection never got put back into the pool, close it.
                if conn:
                    conn.close()

                if self.block:
                    # This should never happen if you got the conn from self._get_conn
                    # ('from None' hides the queue.Full context from the traceback).
                    raise FullPoolError(
                        self,
                        "Pool reached maximum size and no more connections are allowed.",
                    ) from None

                log.warning(
                    "Connection pool is full, discarding connection: %s. Connection pool size: %s",
                    self.host,
                    self.pool.qsize(),
                )

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()
+
    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        # Intentionally a no-op here; presumably subclasses override this to
        # add per-connection checks -- confirm against the HTTPS pool.
+
    def _prepare_proxy(self, conn: BaseHTTPConnection) -> None:
        """Prepare *conn* for use through a proxy; plain HTTP needs no setup."""
        # Nothing to do for HTTP connections.
        pass
+
+ def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout:
+ """Helper that always returns a :class:`urllib3.util.Timeout`"""
+ if timeout is _DEFAULT_TIMEOUT:
+ return self.timeout.clone()
+
+ if isinstance(timeout, Timeout):
+ return timeout.clone()
+ else:
+ # User passed us an int/float. This is for backwards compatibility,
+ # can be removed later
+ return Timeout.from_float(timeout)
+
+ def _raise_timeout(
+ self,
+ err: BaseSSLError | OSError | SocketTimeout,
+ url: str,
+ timeout_value: _TYPE_TIMEOUT | None,
+ ) -> None:
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(
+ self, url, f"Read timed out. (read timeout={timeout_value})"
+ ) from err
+
+ # See the above comment about EAGAIN in Python 3.
+ if hasattr(err, "errno") and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(
+ self, url, f"Read timed out. (read timeout={timeout_value})"
+ ) from err
+
    def _make_request(
        self,
        conn: BaseHTTPConnection,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | None = None,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        chunked: bool = False,
        response_conn: BaseHTTPConnection | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
    ) -> BaseHTTPResponse:
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param response_conn:
            Set this to ``None`` if you will handle releasing the connection or
            set the connection to have the response release it.

        :param preload_content:
            If True, the response's body will be preloaded during construction.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param enforce_content_length:
            Enforce content length checking. Body returned by server must match
            value of Content-Length header, if present. Otherwise, raise error.
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)

        try:
            # Trigger any extra validation we need to do.
            try:
                self._validate_conn(conn)
            except (SocketTimeout, BaseSSLError) as e:
                self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
                raise

        # _validate_conn() starts the connection to an HTTPS proxy
        # so we need to wrap errors with 'ProxyError' here too.
        except (
            OSError,
            NewConnectionError,
            TimeoutError,
            BaseSSLError,
            CertificateError,
            SSLError,
        ) as e:
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            # If the connection didn't successfully connect to its proxy,
            # wrap the error as a ProxyError so callers know the proxy hop
            # (not the target server) is at fault.
            if isinstance(
                new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            raise new_e

        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            conn.request(
                method,
                url,
                body=body,
                headers=headers,
                chunked=chunked,
                preload_content=preload_content,
                decode_content=decode_content,
                enforce_content_length=enforce_content_length,
            )

        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            pass
        except OSError as e:
            # MacOS/Linux
            # EPROTOTYPE and ECONNRESET are needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
            if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
                raise

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        if not conn.is_closed:
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, f"Read timed out. (read timeout={read_timeout})"
                )
            conn.timeout = read_timeout

        # Receive the response from the server
        try:
            response = conn.getresponse()
        except (BaseSSLError, OSError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # Set properties that are used by the pooling layer.
        response.retries = retries
        response._connection = response_conn  # type: ignore[attr-defined]
        response._pool = self  # type: ignore[attr-defined]

        # One access-log style line per request at DEBUG level.
        log.debug(
            '%s://%s:%s "%s %s %s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            response.version_string,
            response.status,
            response.length_remaining,
        )

        return response
+
+ def close(self) -> None:
+ """
+ Close all pooled connections and disable the pool.
+ """
+ if self.pool is None:
+ return
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ # Close all the HTTPConnections in the pool.
+ _close_pool_connections(old_pool)
+
+ def is_same_host(self, url: str) -> bool:
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ if url.startswith("/"):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, _, host, port, *_ = parse_url(url)
+ scheme = scheme or "http"
+ if host is not None:
+ host = _normalize_host(host, scheme=scheme)
+
+ # Use explicit default port for comparison when none is given
+ if self.port and not port:
+ port = port_by_scheme.get(scheme)
+ elif not self.port and port == port_by_scheme.get(scheme):
+ port = None
+
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
+
    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param bool preload_content:
            If True, the response's body will be preloaded into memory.

        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.

        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme

        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = preload_content

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )

        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]

            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Make the request on the HTTPConnection object
            response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

            # Everything went great!
            clean_exit = True

        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
            HTTPException,
            OSError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
            ProxyError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            if isinstance(
                new_e,
                (
                    OSError,
                    NewConnectionError,
                    TimeoutError,
                    SSLError,
                    HTTPException,
                ),
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            elif isinstance(new_e, (OSError, HTTPException)):
                new_e = ProtocolError("Connection aborted.", new_e)

            # retries.increment() raises MaxRetryError once the budget is
            # exhausted; otherwise we fall through and retry below.
            retries = retries.increment(
                method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
            )
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                if conn:
                    conn.close()
                    conn = None
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning(
                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
            )
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries,
                redirect,
                assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # Change the method according to RFC 9110, Section 15.4.4.
                method = "GET"
                # And lose the body not to transfer anything sensitive.
                body = None
                headers = HTTPHeaderDict(headers)._prepare_for_method_change()

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method,
                redirect_location,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
+ ``assert_hostname`` and ``host`` in this order to verify connections.
+ If ``assert_hostname`` is False, no verification is done.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+ ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+ the connection socket into an SSL socket.
+ """
+
+ scheme = "https"
+ ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection
+
+ def __init__(
+ self,
+ host: str,
+ port: int | None = None,
+ timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
+ maxsize: int = 1,
+ block: bool = False,
+ headers: typing.Mapping[str, str] | None = None,
+ retries: Retry | bool | int | None = None,
+ _proxy: Url | None = None,
+ _proxy_headers: typing.Mapping[str, str] | None = None,
+ key_file: str | None = None,
+ cert_file: str | None = None,
+ cert_reqs: int | str | None = None,
+ key_password: str | None = None,
+ ca_certs: str | None = None,
+ ssl_version: int | str | None = None,
+ ssl_minimum_version: ssl.TLSVersion | None = None,
+ ssl_maximum_version: ssl.TLSVersion | None = None,
+ assert_hostname: str | typing.Literal[False] | None = None,
+ assert_fingerprint: str | None = None,
+ ca_cert_dir: str | None = None,
+ **conn_kw: typing.Any,
+ ) -> None:
+ super().__init__(
+ host,
+ port,
+ timeout,
+ maxsize,
+ block,
+ headers,
+ retries,
+ _proxy,
+ _proxy_headers,
+ **conn_kw,
+ )
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.ca_certs = ca_certs
+ self.ca_cert_dir = ca_cert_dir
+ self.ssl_version = ssl_version
+ self.ssl_minimum_version = ssl_minimum_version
+ self.ssl_maximum_version = ssl_maximum_version
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+
+ def _prepare_proxy(self, conn: HTTPSConnection) -> None: # type: ignore[override]
+ """Establishes a tunnel connection through HTTP CONNECT."""
+ if self.proxy and self.proxy.scheme == "https":
+ tunnel_scheme = "https"
+ else:
+ tunnel_scheme = "http"
+
+ conn.set_tunnel(
+ scheme=tunnel_scheme,
+ host=self._tunnel_host,
+ port=self.port,
+ headers=self.proxy_headers,
+ )
+ conn.connect()
+
+ def _new_conn(self) -> BaseHTTPSConnection:
+ """
+ Return a fresh :class:`urllib3.connection.HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.debug(
+ "Starting new HTTPS connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "443",
+ )
+
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection: # type: ignore[comparison-overlap]
+ raise ImportError(
+ "Can't connect to HTTPS URL because the SSL module is not available."
+ )
+
+ actual_host: str = self.host
+ actual_port = self.port
+ if self.proxy is not None and self.proxy.host is not None:
+ actual_host = self.proxy.host
+ actual_port = self.proxy.port
+
+ return self.ConnectionCls(
+ host=actual_host,
+ port=actual_port,
+ timeout=self.timeout.connect_timeout,
+ cert_file=self.cert_file,
+ key_file=self.key_file,
+ key_password=self.key_password,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint,
+ ssl_version=self.ssl_version,
+ ssl_minimum_version=self.ssl_minimum_version,
+ ssl_maximum_version=self.ssl_maximum_version,
+ **self.conn_kw,
+ )
+
+ def _validate_conn(self, conn: BaseHTTPConnection) -> None:
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ super()._validate_conn(conn)
+
+ # Force connect early to allow us to validate the connection.
+ if conn.is_closed:
+ conn.connect()
+
+ # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791
+ if not conn.is_verified and not conn.proxy_is_verified:
+ warnings.warn(
+ (
+ f"Unverified HTTPS request is being made to host '{conn.host}'. "
+ "Adding certificate verification is strongly advised. See: "
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+ "#tls-warnings"
+ ),
+ InsecureRequestWarning,
+ )
+
+
+def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool:
+ """
+ Given a url, return an :class:`.ConnectionPool` instance of its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+ of the url before creating an :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \\**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, _, host, port, *_ = parse_url(url)
+ scheme = scheme or "http"
+ port = port or port_by_scheme.get(scheme, 80)
+ if scheme == "https":
+ return HTTPSConnectionPool(host, port=port, **kw) # type: ignore[arg-type]
+ else:
+ return HTTPConnectionPool(host, port=port, **kw) # type: ignore[arg-type]
+
+
+@typing.overload
+def _normalize_host(host: None, scheme: str | None) -> None: ...
+
+
+@typing.overload
+def _normalize_host(host: str, scheme: str | None) -> str: ...
+
+
+def _normalize_host(host: str | None, scheme: str | None) -> str | None:
+ """
+ Normalize hosts for comparisons and use with sockets.
+ """
+
+ host = normalize_host(host, scheme)
+
+ # httplib doesn't like it when we include brackets in IPv6 addresses
+ # Specifically, if we include brackets but also pass the port then
+ # httplib crazily doubles up the square brackets on the Host header.
+ # Instead, we need to make sure we never pass ``None`` as the port.
+ # However, for backward compatibility reasons we can't actually
+ # *assert* that. See http://bugs.python.org/issue28539
+ if host and host.startswith("[") and host.endswith("]"):
+ host = host[1:-1]
+ return host
+
+
+def _url_from_pool(
+ pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None
+) -> str:
+ """Returns the URL from a given connection pool. This is mainly used for testing and logging."""
+ return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url
+
+
+def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None:
+ """Drains a queue of connections and closes each one."""
+ try:
+ while True:
+ conn = pool.get(block=False)
+ if conn:
+ conn.close()
+ except queue.Empty:
+ pass # Done.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/__init__.py"
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/__init__.py"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/__init__.py"
new file mode 100644
index 0000000..8a3c5be
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/__init__.py"
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+import urllib3.connection
+
+from ...connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connection import EmscriptenHTTPConnection, EmscriptenHTTPSConnection
+
+
+def inject_into_urllib3() -> None:
+ # override connection classes to use emscripten specific classes
+ # n.b. mypy complains about the overriding of classes below
+ # if it isn't ignored
+ HTTPConnectionPool.ConnectionCls = EmscriptenHTTPConnection
+ HTTPSConnectionPool.ConnectionCls = EmscriptenHTTPSConnection
+ urllib3.connection.HTTPConnection = EmscriptenHTTPConnection # type: ignore[misc,assignment]
+ urllib3.connection.HTTPSConnection = EmscriptenHTTPSConnection # type: ignore[misc,assignment]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/connection.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/connection.py"
new file mode 100644
index 0000000..a5a353f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/connection.py"
@@ -0,0 +1,259 @@
+from __future__ import annotations
+
+import os
+import typing
+
+# use http.client.HTTPException for consistency with non-emscripten
+from http.client import HTTPException as HTTPException # noqa: F401
+from http.client import ResponseNotReady
+
+from ..._base_connection import _TYPE_BODY
+from ...connection import HTTPConnection, ProxyConfig, port_by_scheme
+from ...exceptions import TimeoutError
+from ...response import BaseHTTPResponse
+from ...util.connection import _TYPE_SOCKET_OPTIONS
+from ...util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
+from ...util.url import Url
+from .fetch import _RequestError, _TimeoutError, send_request, send_streaming_request
+from .request import EmscriptenRequest
+from .response import EmscriptenHttpResponseWrapper, EmscriptenResponse
+
+if typing.TYPE_CHECKING:
+ from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection
+
+
+class EmscriptenHTTPConnection:
+ default_port: typing.ClassVar[int] = port_by_scheme["http"]
+ default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]
+
+ timeout: None | (float)
+
+ host: str
+ port: int
+ blocksize: int
+ source_address: tuple[str, int] | None
+ socket_options: _TYPE_SOCKET_OPTIONS | None
+
+ proxy: Url | None
+ proxy_config: ProxyConfig | None
+
+ is_verified: bool = False
+ proxy_is_verified: bool | None = None
+
+ _response: EmscriptenResponse | None
+
+ def __init__(
+ self,
+ host: str,
+ port: int = 0,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 8192,
+ socket_options: _TYPE_SOCKET_OPTIONS | None = None,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ ) -> None:
+ self.host = host
+ self.port = port
+ self.timeout = timeout if isinstance(timeout, float) else 0.0
+ self.scheme = "http"
+ self._closed = True
+ self._response = None
+ # ignore these things because we don't
+ # have control over that stuff
+ self.proxy = None
+ self.proxy_config = None
+ self.blocksize = blocksize
+ self.source_address = None
+ self.socket_options = None
+ self.is_verified = False
+
+ def set_tunnel(
+ self,
+ host: str,
+ port: int | None = 0,
+ headers: typing.Mapping[str, str] | None = None,
+ scheme: str = "http",
+ ) -> None:
+ pass
+
+ def connect(self) -> None:
+ pass
+
+ def request(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ # We know *at least* botocore is depending on the order of the
+ # first 3 parameters so to be safe we only mark the later ones
+ # as keyword-only to ensure we have space to extend.
+ *,
+ chunked: bool = False,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ ) -> None:
+ self._closed = False
+ if url.startswith("/"):
+ if self.port is not None:
+ port = f":{self.port}"
+ else:
+ port = ""
+ # no scheme / host / port included, make a full url
+ url = f"{self.scheme}://{self.host}{port}{url}"
+ request = EmscriptenRequest(
+ url=url,
+ method=method,
+ timeout=self.timeout if self.timeout else 0,
+ decode_content=decode_content,
+ )
+ request.set_body(body)
+ if headers:
+ for k, v in headers.items():
+ request.set_header(k, v)
+ self._response = None
+ try:
+ if not preload_content:
+ self._response = send_streaming_request(request)
+ if self._response is None:
+ self._response = send_request(request)
+ except _TimeoutError as e:
+ raise TimeoutError(e.message) from e
+ except _RequestError as e:
+ raise HTTPException(e.message) from e
+
+ def getresponse(self) -> BaseHTTPResponse:
+ if self._response is not None:
+ return EmscriptenHttpResponseWrapper(
+ internal_response=self._response,
+ url=self._response.request.url,
+ connection=self,
+ )
+ else:
+ raise ResponseNotReady()
+
+ def close(self) -> None:
+ self._closed = True
+ self._response = None
+
+ @property
+ def is_closed(self) -> bool:
+ """Whether the connection either is brand new or has been previously closed.
+ If this property is True then both ``is_connected`` and ``has_connected_to_proxy``
+ properties must be False.
+ """
+ return self._closed
+
+ @property
+ def is_connected(self) -> bool:
+ """Whether the connection is actively connected to any origin (proxy or target)"""
+ return True
+
+ @property
+ def has_connected_to_proxy(self) -> bool:
+ """Whether the connection has successfully connected to its proxy.
+ This returns False if no proxy is in use. Used to determine whether
+ errors are coming from the proxy layer or from tunnelling to the target origin.
+ """
+ return False
+
+
+class EmscriptenHTTPSConnection(EmscriptenHTTPConnection):
+ default_port = port_by_scheme["https"]
+ # all this is basically ignored, as browser handles https
+ cert_reqs: int | str | None = None
+ ca_certs: str | None = None
+ ca_cert_dir: str | None = None
+ ca_cert_data: None | str | bytes = None
+ cert_file: str | None
+ key_file: str | None
+ key_password: str | None
+ ssl_context: typing.Any | None
+ ssl_version: int | str | None = None
+ ssl_minimum_version: int | None = None
+ ssl_maximum_version: int | None = None
+ assert_hostname: None | str | typing.Literal[False]
+ assert_fingerprint: str | None = None
+
+ def __init__(
+ self,
+ host: str,
+ port: int = 0,
+ *,
+ timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
+ source_address: tuple[str, int] | None = None,
+ blocksize: int = 16384,
+ socket_options: (
+ None | _TYPE_SOCKET_OPTIONS
+ ) = HTTPConnection.default_socket_options,
+ proxy: Url | None = None,
+ proxy_config: ProxyConfig | None = None,
+ cert_reqs: int | str | None = None,
+ assert_hostname: None | str | typing.Literal[False] = None,
+ assert_fingerprint: str | None = None,
+ server_hostname: str | None = None,
+ ssl_context: typing.Any | None = None,
+ ca_certs: str | None = None,
+ ca_cert_dir: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ ssl_minimum_version: int | None = None,
+ ssl_maximum_version: int | None = None,
+ ssl_version: int | str | None = None, # Deprecated
+ cert_file: str | None = None,
+ key_file: str | None = None,
+ key_password: str | None = None,
+ ) -> None:
+ super().__init__(
+ host,
+ port=port,
+ timeout=timeout,
+ source_address=source_address,
+ blocksize=blocksize,
+ socket_options=socket_options,
+ proxy=proxy,
+ proxy_config=proxy_config,
+ )
+ self.scheme = "https"
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.key_password = key_password
+ self.ssl_context = ssl_context
+ self.server_hostname = server_hostname
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ssl_version = ssl_version
+ self.ssl_minimum_version = ssl_minimum_version
+ self.ssl_maximum_version = ssl_maximum_version
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+ self.ca_cert_data = ca_cert_data
+
+ self.cert_reqs = None
+
+ # The browser will automatically verify all requests.
+ # We have no control over that setting.
+ self.is_verified = True
+
+ def set_cert(
+ self,
+ key_file: str | None = None,
+ cert_file: str | None = None,
+ cert_reqs: int | str | None = None,
+ key_password: str | None = None,
+ ca_certs: str | None = None,
+ assert_hostname: None | str | typing.Literal[False] = None,
+ assert_fingerprint: str | None = None,
+ ca_cert_dir: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ ) -> None:
+ pass
+
+
+# verify that this class implements BaseHTTP(s) connection correctly
+if typing.TYPE_CHECKING:
+ _supports_http_protocol: BaseHTTPConnection = EmscriptenHTTPConnection("", 0)
+ _supports_https_protocol: BaseHTTPSConnection = EmscriptenHTTPSConnection("", 0)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/emscripten_fetch_worker.js" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/emscripten_fetch_worker.js"
new file mode 100644
index 0000000..faf141e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/emscripten_fetch_worker.js"
@@ -0,0 +1,110 @@
+let Status = {
+ SUCCESS_HEADER: -1,
+ SUCCESS_EOF: -2,
+ ERROR_TIMEOUT: -3,
+ ERROR_EXCEPTION: -4,
+};
+
+let connections = new Map();
+let nextConnectionID = 1;
+const encoder = new TextEncoder();
+
+self.addEventListener("message", async function (event) {
+ if (event.data.close) {
+ let connectionID = event.data.close;
+ connections.delete(connectionID);
+ return;
+ } else if (event.data.getMore) {
+ let connectionID = event.data.getMore;
+ let { curOffset, value, reader, intBuffer, byteBuffer } =
+ connections.get(connectionID);
+ // if we still have some in buffer, then just send it back straight away
+ if (!value || curOffset >= value.length) {
+ // read another buffer if required
+ try {
+ let readResponse = await reader.read();
+
+ if (readResponse.done) {
+ // read everything - clear connection and return
+ connections.delete(connectionID);
+ Atomics.store(intBuffer, 0, Status.SUCCESS_EOF);
+ Atomics.notify(intBuffer, 0);
+ // finished reading successfully
+ // return from event handler
+ return;
+ }
+ curOffset = 0;
+ connections.get(connectionID).value = readResponse.value;
+ value = readResponse.value;
+ } catch (error) {
+ console.log("Request exception:", error);
+ let errorBytes = encoder.encode(error.message);
+ let written = errorBytes.length;
+ byteBuffer.set(errorBytes);
+ intBuffer[1] = written;
+ Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
+ Atomics.notify(intBuffer, 0);
+ }
+ }
+
+ // send as much buffer as we can
+ let curLen = value.length - curOffset;
+ if (curLen > byteBuffer.length) {
+ curLen = byteBuffer.length;
+ }
+ byteBuffer.set(value.subarray(curOffset, curOffset + curLen), 0);
+
+ Atomics.store(intBuffer, 0, curLen); // store current length in bytes
+ Atomics.notify(intBuffer, 0);
+ curOffset += curLen;
+ connections.get(connectionID).curOffset = curOffset;
+
+ return;
+ } else {
+ // start fetch
+ let connectionID = nextConnectionID;
+ nextConnectionID += 1;
+ const intBuffer = new Int32Array(event.data.buffer);
+ const byteBuffer = new Uint8Array(event.data.buffer, 8);
+ try {
+ const response = await fetch(event.data.url, event.data.fetchParams);
+ // return the headers first via textencoder
+ var headers = [];
+ for (const pair of response.headers.entries()) {
+ headers.push([pair[0], pair[1]]);
+ }
+ let headerObj = {
+ headers: headers,
+ status: response.status,
+ connectionID,
+ };
+ const headerText = JSON.stringify(headerObj);
+ let headerBytes = encoder.encode(headerText);
+ let written = headerBytes.length;
+ byteBuffer.set(headerBytes);
+ intBuffer[1] = written;
+ // make a connection
+ connections.set(connectionID, {
+ reader: response.body.getReader(),
+ intBuffer: intBuffer,
+ byteBuffer: byteBuffer,
+ value: undefined,
+ curOffset: 0,
+ });
+ // set header ready
+ Atomics.store(intBuffer, 0, Status.SUCCESS_HEADER);
+ Atomics.notify(intBuffer, 0);
+ // all fetching after this goes through a new postmessage call with getMore
+ // this allows for parallel requests
+ } catch (error) {
+ console.log("Request exception:", error);
+ let errorBytes = encoder.encode(error.message);
+ let written = errorBytes.length;
+ byteBuffer.set(errorBytes);
+ intBuffer[1] = written;
+ Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
+ Atomics.notify(intBuffer, 0);
+ }
+ }
+});
+self.postMessage({ inited: true });
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/fetch.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/fetch.py"
new file mode 100644
index 0000000..612cfdd
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/fetch.py"
@@ -0,0 +1,726 @@
+"""
+Support for streaming http requests in emscripten.
+
+A few caveats -
+
+If your browser (or Node.js) has WebAssembly JavaScript Promise Integration enabled
+https://github.com/WebAssembly/js-promise-integration/blob/main/proposals/js-promise-integration/Overview.md
+*and* you launch pyodide using `pyodide.runPythonAsync`, this will fetch data using the
+JavaScript asynchronous fetch api (wrapped via `pyodide.ffi.call_sync`). In this case
+timeouts and streaming should just work.
+
+Otherwise, it uses a combination of XMLHttpRequest and a web-worker for streaming.
+
+This approach has several caveats:
+
+Firstly, you can't do streaming http in the main UI thread, because atomics.wait isn't allowed.
+Streaming only works if you're running pyodide in a web worker.
+
+Secondly, this uses an extra web worker and SharedArrayBuffer to do the asynchronous fetch
+operation, so it requires that you have crossOriginIsolation enabled, by serving over https
+(or from localhost) with the two headers below set:
+
+ Cross-Origin-Opener-Policy: same-origin
+ Cross-Origin-Embedder-Policy: require-corp
+
+You can tell if cross origin isolation is successfully enabled by looking at the global crossOriginIsolated variable in
+JavaScript console. If it isn't, streaming requests will fallback to XMLHttpRequest, i.e. getting the whole
+request into a buffer and then returning it. it shows a warning in the JavaScript console in this case.
+
+Finally, the webworker which does the streaming fetch is created on initial import, but will only be started once
+control is returned to javascript. Call `await wait_for_streaming_ready()` to wait for streaming fetch.
+
+NB: in this code, there are a lot of JavaScript objects. They are named js_*
+to make it clear what type of object they are.
+"""
+
+from __future__ import annotations
+
+import io
+import json
+from email.parser import Parser
+from importlib.resources import files
+from typing import TYPE_CHECKING, Any
+
+import js # type: ignore[import-not-found]
+from pyodide.ffi import ( # type: ignore[import-not-found]
+ JsArray,
+ JsException,
+ JsProxy,
+ to_js,
+)
+
+if TYPE_CHECKING:
+ from typing_extensions import Buffer
+
+from .request import EmscriptenRequest
+from .response import EmscriptenResponse
+
+"""
+There are some headers that trigger unintended CORS preflight requests.
+See also https://github.com/koenvo/pyodide-http/issues/22
+"""
+HEADERS_TO_IGNORE = ("user-agent",)
+
+SUCCESS_HEADER = -1
+SUCCESS_EOF = -2
+ERROR_TIMEOUT = -3
+ERROR_EXCEPTION = -4
+
+
+class _RequestError(Exception):
+ def __init__(
+ self,
+ message: str | None = None,
+ *,
+ request: EmscriptenRequest | None = None,
+ response: EmscriptenResponse | None = None,
+ ):
+ self.request = request
+ self.response = response
+ self.message = message
+ super().__init__(self.message)
+
+
+class _StreamingError(_RequestError):
+ pass
+
+
+class _TimeoutError(_RequestError):
+ pass
+
+
+def _obj_from_dict(dict_val: dict[str, Any]) -> JsProxy:
+ return to_js(dict_val, dict_converter=js.Object.fromEntries)
+
+
+class _ReadStream(io.RawIOBase):
+ def __init__(
+ self,
+ int_buffer: JsArray,
+ byte_buffer: JsArray,
+ timeout: float,
+ worker: JsProxy,
+ connection_id: int,
+ request: EmscriptenRequest,
+ ):
+ self.int_buffer = int_buffer
+ self.byte_buffer = byte_buffer
+ self.read_pos = 0
+ self.read_len = 0
+ self.connection_id = connection_id
+ self.worker = worker
+ self.timeout = int(1000 * timeout) if timeout > 0 else None
+ self.is_live = True
+ self._is_closed = False
+ self.request: EmscriptenRequest | None = request
+
+ def __del__(self) -> None:
+ self.close()
+
+ # this is compatible with _base_connection
+ def is_closed(self) -> bool:
+ return self._is_closed
+
+ # for compatibility with RawIOBase
+ @property
+ def closed(self) -> bool:
+ return self.is_closed()
+
+ def close(self) -> None:
+ if self.is_closed():
+ return
+ self.read_len = 0
+ self.read_pos = 0
+ self.int_buffer = None
+ self.byte_buffer = None
+ self._is_closed = True
+ self.request = None
+ if self.is_live:
+ self.worker.postMessage(_obj_from_dict({"close": self.connection_id}))
+ self.is_live = False
+ super().close()
+
+ def readable(self) -> bool:
+ return True
+
+ def writable(self) -> bool:
+ return False
+
+ def seekable(self) -> bool:
+ return False
+
+ def readinto(self, byte_obj: Buffer) -> int:
+ if not self.int_buffer:
+ raise _StreamingError(
+ "No buffer for stream in _ReadStream.readinto",
+ request=self.request,
+ response=None,
+ )
+ if self.read_len == 0:
+ # wait for the worker to send something
+ js.Atomics.store(self.int_buffer, 0, ERROR_TIMEOUT)
+ self.worker.postMessage(_obj_from_dict({"getMore": self.connection_id}))
+ if (
+ js.Atomics.wait(self.int_buffer, 0, ERROR_TIMEOUT, self.timeout)
+ == "timed-out"
+ ):
+ raise _TimeoutError
+ data_len = self.int_buffer[0]
+ if data_len > 0:
+ self.read_len = data_len
+ self.read_pos = 0
+ elif data_len == ERROR_EXCEPTION:
+ string_len = self.int_buffer[1]
+ # decode the error string
+ js_decoder = js.TextDecoder.new()
+ json_str = js_decoder.decode(self.byte_buffer.slice(0, string_len))
+ raise _StreamingError(
+ f"Exception thrown in fetch: {json_str}",
+ request=self.request,
+ response=None,
+ )
+ else:
+ # EOF, free the buffers and return zero
+ # and free the request
+ self.is_live = False
+ self.close()
+ return 0
+ # copy from int32array to python bytes
+ ret_length = min(self.read_len, len(memoryview(byte_obj)))
+ subarray = self.byte_buffer.subarray(
+ self.read_pos, self.read_pos + ret_length
+ ).to_py()
+ memoryview(byte_obj)[0:ret_length] = subarray
+ self.read_len -= ret_length
+ self.read_pos += ret_length
+ return ret_length
+
+
+class _StreamingFetcher:
+ def __init__(self) -> None:
+ # make web-worker and data buffer on startup
+ self.streaming_ready = False
+ streaming_worker_code = (
+ files(__package__)
+ .joinpath("emscripten_fetch_worker.js")
+ .read_text(encoding="utf-8")
+ )
+ js_data_blob = js.Blob.new(
+ to_js([streaming_worker_code], create_pyproxies=False),
+ _obj_from_dict({"type": "application/javascript"}),
+ )
+
+ def promise_resolver(js_resolve_fn: JsProxy, js_reject_fn: JsProxy) -> None:
+ def onMsg(e: JsProxy) -> None:
+ self.streaming_ready = True
+ js_resolve_fn(e)
+
+ def onErr(e: JsProxy) -> None:
+ js_reject_fn(e) # Defensive: never happens in ci
+
+ self.js_worker.onmessage = onMsg
+ self.js_worker.onerror = onErr
+
+ js_data_url = js.URL.createObjectURL(js_data_blob)
+ self.js_worker = js.globalThis.Worker.new(js_data_url)
+ self.js_worker_ready_promise = js.globalThis.Promise.new(promise_resolver)
+
+ def send(self, request: EmscriptenRequest) -> EmscriptenResponse:
+ headers = {
+ k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE
+ }
+
+ body = request.body
+ fetch_data = {"headers": headers, "body": to_js(body), "method": request.method}
+ # start the request off in the worker
+ timeout = int(1000 * request.timeout) if request.timeout > 0 else None
+ js_shared_buffer = js.SharedArrayBuffer.new(1048576)
+ js_int_buffer = js.Int32Array.new(js_shared_buffer)
+ js_byte_buffer = js.Uint8Array.new(js_shared_buffer, 8)
+
+ js.Atomics.store(js_int_buffer, 0, ERROR_TIMEOUT)
+ js.Atomics.notify(js_int_buffer, 0)
+ js_absolute_url = js.URL.new(request.url, js.location).href
+ self.js_worker.postMessage(
+ _obj_from_dict(
+ {
+ "buffer": js_shared_buffer,
+ "url": js_absolute_url,
+ "fetchParams": fetch_data,
+ }
+ )
+ )
+ # wait for the worker to send something
+ js.Atomics.wait(js_int_buffer, 0, ERROR_TIMEOUT, timeout)
+ if js_int_buffer[0] == ERROR_TIMEOUT:
+ raise _TimeoutError(
+ "Timeout connecting to streaming request",
+ request=request,
+ response=None,
+ )
+ elif js_int_buffer[0] == SUCCESS_HEADER:
+ # got response
+ # header length is in second int of intBuffer
+ string_len = js_int_buffer[1]
+ # decode the rest to a JSON string
+ js_decoder = js.TextDecoder.new()
+ # this does a copy (the slice) because decode can't work on shared array
+ # for some silly reason
+ json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
+ # get it as an object
+ response_obj = json.loads(json_str)
+ return EmscriptenResponse(
+ request=request,
+ status_code=response_obj["status"],
+ headers=response_obj["headers"],
+ body=_ReadStream(
+ js_int_buffer,
+ js_byte_buffer,
+ request.timeout,
+ self.js_worker,
+ response_obj["connectionID"],
+ request,
+ ),
+ )
+ elif js_int_buffer[0] == ERROR_EXCEPTION:
+ string_len = js_int_buffer[1]
+ # decode the error string
+ js_decoder = js.TextDecoder.new()
+ json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len))
+ raise _StreamingError(
+ f"Exception thrown in fetch: {json_str}", request=request, response=None
+ )
+ else:
+ raise _StreamingError(
+ f"Unknown status from worker in fetch: {js_int_buffer[0]}",
+ request=request,
+ response=None,
+ )
+
+
class _JSPIReadStream(io.RawIOBase):
    """
    A read stream that uses pyodide.ffi.run_sync to read from a JavaScript fetch
    response. This requires support for WebAssembly JavaScript Promise Integration
    in the containing browser, and for pyodide to be launched via runPythonAsync.

    :param js_read_stream:
        The JavaScript stream reader

    :param timeout:
        Timeout in seconds

    :param request:
        The request we're handling

    :param response:
        The response this stream relates to

    :param js_abort_controller:
        A JavaScript AbortController object, used for timeouts
    """

    def __init__(
        self,
        js_read_stream: Any,
        timeout: float,
        request: EmscriptenRequest,
        response: EmscriptenResponse,
        js_abort_controller: Any,  # JavaScript AbortController for timeouts
    ):
        self.js_read_stream = js_read_stream
        self.timeout = timeout
        self._is_closed = False
        self._is_done = False
        self.request: EmscriptenRequest | None = request
        self.response: EmscriptenResponse | None = response
        # Most recently fetched chunk (Python-side copy) and the read offset
        # into it; None means a fresh chunk must be pulled from JavaScript.
        self.current_buffer = None
        self.current_buffer_pos = 0
        self.js_abort_controller = js_abort_controller

    def __del__(self) -> None:
        # Ensure the underlying JavaScript stream is cancelled if this object
        # is garbage collected without an explicit close().
        self.close()

    # this is compatible with _base_connection
    def is_closed(self) -> bool:
        return self._is_closed

    # for compatibility with RawIOBase
    @property
    def closed(self) -> bool:
        return self.is_closed()

    def close(self) -> None:
        """Cancel the JavaScript reader and drop request/response references (idempotent)."""
        if self.is_closed():
            return
        # NOTE(review): read_len/read_pos are written here but never read
        # anywhere in this class - presumably vestigial; confirm before removing.
        self.read_len = 0
        self.read_pos = 0
        self.js_read_stream.cancel()
        self.js_read_stream = None
        self._is_closed = True
        self._is_done = True
        self.request = None
        self.response = None
        super().close()

    def readable(self) -> bool:
        return True

    def writable(self) -> bool:
        return False

    def seekable(self) -> bool:
        return False

    def _get_next_buffer(self) -> bool:
        """Pull the next chunk from JavaScript; return False once the stream is done."""
        # Suspends WebAssembly (via JSPI) until the reader's promise resolves
        # or the timeout aborts it.
        result_js = _run_sync_with_timeout(
            self.js_read_stream.read(),
            self.timeout,
            self.js_abort_controller,
            request=self.request,
            response=self.response,
        )
        if result_js.done:
            self._is_done = True
            return False
        else:
            self.current_buffer = result_js.value.to_py()
            self.current_buffer_pos = 0
            return True

    def readinto(self, byte_obj: Buffer) -> int:
        """Copy up to len(byte_obj) bytes into byte_obj; 0 signals end of stream."""
        if self.current_buffer is None:
            if not self._get_next_buffer() or self.current_buffer is None:
                self.close()
                return 0
        # Copy as much of the current chunk as fits in the caller's buffer.
        ret_length = min(
            len(byte_obj), len(self.current_buffer) - self.current_buffer_pos
        )
        byte_obj[0:ret_length] = self.current_buffer[
            self.current_buffer_pos : self.current_buffer_pos + ret_length
        ]
        self.current_buffer_pos += ret_length
        if self.current_buffer_pos == len(self.current_buffer):
            # Chunk exhausted - fetch a fresh one on the next call.
            self.current_buffer = None
        return ret_length
+
+
# Environment probes: main thread vs worker vs Node.js.
def is_in_browser_main_thread() -> bool:
    """Return True when running on the browser's main (window) thread.

    On the main thread the global ``self`` is the ``window`` object; in a
    worker (or outside a browser) it is not.
    """
    has_globals = hasattr(js, "window") and hasattr(js, "self")
    return has_globals and js.self == js.window
+
+
def is_cross_origin_isolated() -> bool:
    """Return True if the page is cross-origin isolated (SharedArrayBuffer usable)."""
    if not hasattr(js, "crossOriginIsolated"):
        return False
    return js.crossOriginIsolated
+
+
def is_in_node() -> bool:
    """Return True when running under Node.js rather than in a browser."""
    process = getattr(js, "process", None)
    release = getattr(process, "release", None) if process is not None else None
    name = getattr(release, "name", None) if release is not None else None
    return name == "node"
+
+
def is_worker_available() -> bool:
    """Return True if the Worker and Blob constructors exist in this JS environment."""
    return all(hasattr(js, attr) for attr in ("Worker", "Blob"))
+
+
# Build the singleton worker-based fetcher only where it can actually work:
# a cross-origin-isolated browser worker (not the main thread, not Node.js).
_streaming_compatible = (
    is_worker_available()
    and is_cross_origin_isolated()
    and not is_in_browser_main_thread()
    and not is_in_node()
)

_fetcher: _StreamingFetcher | None = (
    _StreamingFetcher() if _streaming_compatible else None
)
+
+
# Error message raised when a request is attempted under Node.js without
# JSPI available - there is no synchronous fallback in that environment.
NODE_JSPI_ERROR = (
    "urllib3 only works in Node.js with pyodide.runPythonAsync"
    " and requires the flag --experimental-wasm-stack-switching in "
    " versions of node <24."
)
+
+
def send_streaming_request(request: EmscriptenRequest) -> EmscriptenResponse | None:
    """Send *request* with a streaming response body, or return None.

    Prefers JSPI when available. In Node.js without JSPI this raises,
    because the worker-based fallback only works in browsers.
    """
    if has_jspi():
        return send_jspi_request(request, True)
    if is_in_node():
        raise _RequestError(
            message=NODE_JSPI_ERROR,
            request=request,
            response=None,
        )

    if _fetcher and streaming_ready():
        return _fetcher.send(request)
    # Streaming isn't possible here; warn once and let the caller fall back
    # to a buffered request.
    _show_streaming_warning()
    return None
+
+
_SHOWN_TIMEOUT_WARNING = False


def _show_timeout_warning() -> None:
    """Warn (once per session) that timeouts don't work on the main browser thread."""
    global _SHOWN_TIMEOUT_WARNING
    if _SHOWN_TIMEOUT_WARNING:
        return
    _SHOWN_TIMEOUT_WARNING = True
    js.console.warn("Warning: Timeout is not available on main browser thread")
+
+
_SHOWN_STREAMING_WARNING = False


def _show_streaming_warning() -> None:
    """Explain (once per session) every reason streaming fetch is unavailable."""
    global _SHOWN_STREAMING_WARNING
    if not _SHOWN_STREAMING_WARNING:
        _SHOWN_STREAMING_WARNING = True
        # Accumulate one line per applicable reason so the user can fix them.
        message = "Can't stream HTTP requests because: \n"
        if not is_cross_origin_isolated():
            message += " Page is not cross-origin isolated\n"
        if is_in_browser_main_thread():
            message += " Python is running in main browser thread\n"
        if not is_worker_available():
            message += " Worker or Blob classes are not available in this environment."  # Defensive: this is always False in browsers that we test in
        if streaming_ready() is False:
            message += """ Streaming fetch worker isn't ready. If you want to be sure that streaming fetch
is working, you need to call: 'await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()`"""
        from js import console

        console.warn(message)
+
+
def send_request(request: EmscriptenRequest) -> EmscriptenResponse:
    """Send *request* and return the complete (buffered) response.

    Uses JSPI when available; otherwise falls back to a synchronous
    XMLHttpRequest. In Node.js without JSPI there is no synchronous
    fallback, so a :class:`_RequestError` is raised.

    :raises _TimeoutError: if the XHR reports a TimeoutError
    :raises _RequestError: on network or other fetch errors
    """
    if has_jspi():
        return send_jspi_request(request, False)
    elif is_in_node():
        raise _RequestError(
            message=NODE_JSPI_ERROR,
            request=request,
            response=None,
        )
    try:
        js_xhr = js.XMLHttpRequest.new()

        if not is_in_browser_main_thread():
            # In a worker the raw bytes can be received directly.
            js_xhr.responseType = "arraybuffer"
            if request.timeout:
                js_xhr.timeout = int(request.timeout * 1000)
        else:
            # A synchronous XHR on the main thread can only return text;
            # ISO-8859-15 maps bytes 1:1 so the body is recovered losslessly
            # by re-encoding with the same charset below.
            js_xhr.overrideMimeType("text/plain; charset=ISO-8859-15")
            if request.timeout:
                # timeout isn't available on the main thread - show a warning in console
                # if it is set
                _show_timeout_warning()

        js_xhr.open(request.method, request.url, False)
        for name, value in request.headers.items():
            if name.lower() not in HEADERS_TO_IGNORE:
                js_xhr.setRequestHeader(name, value)

        js_xhr.send(to_js(request.body))

        # getAllResponseHeaders() yields RFC 822 style lines; reuse the
        # stdlib email parser to split them into a dict.
        headers = dict(Parser().parsestr(js_xhr.getAllResponseHeaders()))

        if not is_in_browser_main_thread():
            body = js_xhr.response.to_py().tobytes()
        else:
            body = js_xhr.response.encode("ISO-8859-15")
        return EmscriptenResponse(
            status_code=js_xhr.status, headers=headers, body=body, request=request
        )
    except JsException as err:
        # Map JavaScript exception names onto urllib3-style errors.
        if err.name == "TimeoutError":
            raise _TimeoutError(err.message, request=request)
        elif err.name == "NetworkError":
            raise _RequestError(err.message, request=request)
        else:
            # general http error
            raise _RequestError(err.message, request=request)
+
+
def send_jspi_request(
    request: EmscriptenRequest, streaming: bool
) -> EmscriptenResponse:
    """
    Send a request using WebAssembly JavaScript Promise Integration
    to wrap the asynchronous JavaScript fetch api (experimental).

    :param request:
        Request to send

    :param streaming:
        Whether to stream the response

    :return: The response object
    :rtype: EmscriptenResponse
    """
    timeout = request.timeout
    js_abort_controller = js.AbortController.new()
    # Drop headers the browser manages itself.
    # NOTE(review): unlike send_request(), the key is not lower-cased before
    # the HEADERS_TO_IGNORE membership test - confirm this is intended.
    headers = {k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE}
    req_body = request.body
    fetch_data = {
        "headers": headers,
        "body": to_js(req_body),
        "method": request.method,
        "signal": js_abort_controller.signal,
    }
    # Node.js returns the whole response (unlike opaqueredirect in browsers),
    # so urllib3 can set `redirect: manual` to control redirects itself.
    # https://stackoverflow.com/a/78524615
    if _is_node_js():
        fetch_data["redirect"] = "manual"
    # Call JavaScript fetch (async api, returns a promise)
    fetcher_promise_js = js.fetch(request.url, _obj_from_dict(fetch_data))
    # Now suspend WebAssembly until we resolve that promise
    # or time out.
    response_js = _run_sync_with_timeout(
        fetcher_promise_js,
        timeout,
        js_abort_controller,
        request=request,
        response=None,
    )
    # Copy response headers out of the JavaScript Headers iterator.
    headers = {}
    header_iter = response_js.headers.entries()
    while True:
        iter_value_js = header_iter.next()
        if getattr(iter_value_js, "done", False):
            break
        else:
            headers[str(iter_value_js.value[0])] = str(iter_value_js.value[1])
    status_code = response_js.status
    body: bytes | io.RawIOBase = b""

    response = EmscriptenResponse(
        status_code=status_code, headers=headers, body=b"", request=request
    )
    if streaming:
        # get via inputstream
        if response_js.body is not None:
            # get a reader from the fetch response
            body_stream_js = response_js.body.getReader()
            body = _JSPIReadStream(
                body_stream_js, timeout, request, response, js_abort_controller
            )
    else:
        # get directly via arraybuffer
        # n.b. this is another async JavaScript call.
        body = _run_sync_with_timeout(
            response_js.arrayBuffer(),
            timeout,
            js_abort_controller,
            request=request,
            response=response,
        ).to_py()
    response.body = body
    return response
+
+
def _run_sync_with_timeout(
    promise: Any,
    timeout: float,
    js_abort_controller: Any,
    request: EmscriptenRequest | None,
    response: EmscriptenResponse | None,
) -> Any:
    """
    Await a JavaScript promise synchronously with a timeout which is implemented
    via the AbortController

    :param promise:
        Javascript promise to await

    :param timeout:
        Timeout in seconds

    :param js_abort_controller:
        A JavaScript AbortController object, used on timeout

    :param request:
        The request being handled

    :param response:
        The response being handled (if it exists yet)

    :raises _TimeoutError: If the request times out
    :raises _RequestError: If the request raises a JavaScript exception

    :return: The result of awaiting the promise.
    """
    timer_id = None
    if timeout > 0:
        # Schedule a JavaScript timer that aborts the operation when it fires.
        timer_id = js.setTimeout(
            js_abort_controller.abort.bind(js_abort_controller), int(timeout * 1000)
        )
    try:
        from pyodide.ffi import run_sync

        # run_sync here uses WebAssembly JavaScript Promise Integration to
        # suspend python until the JavaScript promise resolves.
        return run_sync(promise)
    except JsException as err:
        if err.name == "AbortError":
            # The only abort source wired up here is our own timeout timer.
            raise _TimeoutError(
                message="Request timed out", request=request, response=response
            )
        else:
            raise _RequestError(message=err.message, request=request, response=response)
    finally:
        # Always cancel a pending abort timer so it can't fire later.
        if timer_id is not None:
            js.clearTimeout(timer_id)
+
+
def has_jspi() -> bool:
    """
    Return true if jspi can be used.

    JSPI needs both runtime support and the right WebAssembly state - i.e.
    the JavaScript call into Python must have been async, not sync.

    :return: True if jspi can be used.
    :rtype: bool
    """
    try:
        from pyodide.ffi import can_run_sync, run_sync  # noqa: F401
    except ImportError:
        # Not running under a pyodide build that exposes JSPI helpers.
        return False
    return bool(can_run_sync())
+
+
def _is_node_js() -> bool:
    """
    Check if we are in Node.js.

    :return: True if we are in Node.js.
    :rtype: bool
    """
    # Delegate to is_in_node() so Node.js detection lives in one place;
    # previously this duplicated the attribute chain (and omitted the guard
    # that js.process.release.name exists at all).
    return is_in_node()
+
+
def streaming_ready() -> bool | None:
    """Return whether the streaming worker is ready; None when no fetcher exists."""
    if not _fetcher:
        # No fetcher was created for this environment - signal with None.
        return None
    return _fetcher.streaming_ready
+
+
async def wait_for_streaming_ready() -> bool:
    """Await the streaming worker's ready promise; False when no fetcher exists."""
    if not _fetcher:
        return False
    await _fetcher.js_worker_ready_promise
    return True
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/request.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/request.py"
new file mode 100644
index 0000000..e692e69
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/request.py"
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+
+from ..._base_connection import _TYPE_BODY
+
+
@dataclass
class EmscriptenRequest:
    """Plain-data description of an HTTP request for the Emscripten backend."""

    method: str  # HTTP verb, e.g. "GET"
    url: str
    params: dict[str, str] | None = None  # presumably query parameters - not used in visible code; confirm
    body: _TYPE_BODY | None = None
    headers: dict[str, str] = field(default_factory=dict)
    timeout: float = 0  # seconds; 0 means no timeout
    decode_content: bool = True

    def set_header(self, name: str, value: str) -> None:
        """Store *value* under the capitalized form of *name*."""
        # NOTE(review): str.capitalize() lower-cases everything after the
        # first character, so "Content-Type" is stored as "Content-type".
        # Consumers in fetch.py compare with .lower(); confirm before changing.
        self.headers[name.capitalize()] = value

    def set_body(self, body: _TYPE_BODY | None) -> None:
        """Replace the request body."""
        self.body = body
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/response.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/response.py"
new file mode 100644
index 0000000..cb1088a
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/emscripten/response.py"
@@ -0,0 +1,277 @@
+from __future__ import annotations
+
+import json as _json
+import logging
+import typing
+from contextlib import contextmanager
+from dataclasses import dataclass
+from http.client import HTTPException as HTTPException
+from io import BytesIO, IOBase
+
+from ...exceptions import InvalidHeader, TimeoutError
+from ...response import BaseHTTPResponse
+from ...util.retry import Retry
+from .request import EmscriptenRequest
+
+if typing.TYPE_CHECKING:
+ from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection
+
+log = logging.getLogger(__name__)
+
+
@dataclass
class EmscriptenResponse:
    """Plain-data container for a response produced by the Emscripten fetch layer."""

    status_code: int  # HTTP status code
    headers: dict[str, str]
    body: IOBase | bytes  # a stream for streaming fetches, bytes otherwise
    request: EmscriptenRequest  # the request that produced this response
+
+
class EmscriptenHttpResponseWrapper(BaseHTTPResponse):
    """Present an :class:`EmscriptenResponse` through urllib3's
    ``BaseHTTPResponse`` interface.

    The browser always performs content decoding itself, so the
    ``decode_content`` parameters below are accepted for API compatibility
    but have no effect.
    """

    def __init__(
        self,
        internal_response: EmscriptenResponse,
        url: str | None = None,
        connection: BaseHTTPConnection | BaseHTTPSConnection | None = None,
    ):
        self._pool = None  # set by pool class
        self._body = None  # cached complete body, filled by read(cache_content=True)
        self._response = internal_response
        self._url = url
        self._connection = connection
        self._closed = False
        super().__init__(
            headers=internal_response.headers,
            status=internal_response.status_code,
            request_url=url,
            version=0,
            version_string="HTTP/?",
            reason="",
            decode_content=True,
        )
        self.length_remaining = self._init_length(self._response.request.method)
        self.length_is_certain = False

    @property
    def url(self) -> str | None:
        return self._url

    @url.setter
    def url(self, url: str | None) -> None:
        self._url = url

    @property
    def connection(self) -> BaseHTTPConnection | BaseHTTPSConnection | None:
        return self._connection

    @property
    def retries(self) -> Retry | None:
        return self._retries

    @retries.setter
    def retries(self, retries: Retry | None) -> None:
        # Plain storage; kept as a property for BaseHTTPResponse compatibility.
        self._retries = retries

    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Generator[bytes]:
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            that much data per iteration, but may return less. This is
            particularly likely when using compressed data. However, the
            empty string will never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        while True:
            data = self.read(amt=amt, decode_content=decode_content)

            if data:
                yield data
            else:
                break

    def _init_length(self, request_method: str | None) -> int | None:
        """Derive the expected body length from Content-Length, or None when
        absent/invalid; certain statuses and HEAD always have length 0."""
        length: int | None
        content_length: str | None = self.headers.get("content-length")

        if content_length is not None:
            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = {int(val) for val in content_length.split(",")}
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % content_length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    length = None

        else:  # if content_length is None
            length = None

        # Check for responses that shouldn't include a body
        if (
            self.status in (204, 304)
            or 100 <= self.status < 200
            or request_method == "HEAD"
        ):
            length = 0

        return length

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,  # ignored because browser decodes always
        cache_content: bool = False,
    ) -> bytes:
        """Read up to ``amt`` bytes of the body (all of it when ``amt`` is
        None or negative); returns b"" once closed or exhausted."""
        if (
            self._closed
            or self._response is None
            or (isinstance(self._response.body, IOBase) and self._response.body.closed)
        ):
            return b""

        with self._error_catcher():
            # body has been preloaded as a string by XmlHttpRequest
            if not isinstance(self._response.body, IOBase):
                self.length_remaining = len(self._response.body)
                self.length_is_certain = True
                # wrap body in IOStream
                self._response.body = BytesIO(self._response.body)
            if amt is not None and amt >= 0:
                # don't cache partial content
                cache_content = False
                data = self._response.body.read(amt)
            else:  # read all we can (and cache it)
                data = self._response.body.read()
                if cache_content:
                    self._body = data
            if self.length_remaining is not None:
                self.length_remaining = max(self.length_remaining - len(data), 0)
            if len(data) == 0 or (
                self.length_is_certain and self.length_remaining == 0
            ):
                # definitely finished reading, close response stream
                self._response.body.close()
            return typing.cast(bytes, data)

    def read_chunked(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> typing.Generator[bytes]:
        """Yield body chunks of up to *amt* bytes until exhausted.

        Chunked transfer decoding is already handled by the browser, so this
        is a plain read loop.
        """
        while True:
            # local renamed from `bytes`, which shadowed the builtin type
            chunk = self.read(amt, decode_content)
            if not chunk:
                break
            yield chunk

    def release_conn(self) -> None:
        """Return the underlying connection to its pool, if both exist."""
        if not self._pool or not self._connection:
            return None

        self._pool._put_conn(self._connection)
        self._connection = None

    def drain_conn(self) -> None:
        """Discard any remaining body data by closing the response."""
        self.close()

    @property
    def data(self) -> bytes:
        # Return the cached body if a previous full read stored it,
        # otherwise read (and cache) everything now.
        if self._body:
            return self._body
        else:
            return self.read(cache_content=True)

    def json(self) -> typing.Any:
        """
        Deserializes the body of the HTTP response as a Python object.

        The body of the HTTP response must be encoded using UTF-8, as per
        `RFC 8529 Section 8.1 <https://www.rfc-editor.org/rfc/rfc8259#section-8.1>`_.

        To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to
        your custom decoder instead.

        If the body of the HTTP response is not decodable to UTF-8, a
        `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a
        valid JSON document, a `json.JSONDecodeError` will be raised.

        Read more :ref:`here <json_content>`.

        :returns: The body of the HTTP response as a Python object.
        """
        data = self.data.decode("utf-8")
        return _json.loads(data)

    def close(self) -> None:
        """Close the body stream and the connection (idempotent)."""
        if not self._closed:
            if isinstance(self._response.body, IOBase):
                self._response.body.close()
            if self._connection:
                self._connection.close()
            self._connection = None
            self._closed = True

    @contextmanager
    def _error_catcher(self) -> typing.Generator[None]:
        """
        Catch Emscripten specific exceptions thrown by fetch.py,
        instead re-raising urllib3 variants, so that low-level exceptions
        are not leaked in the high-level api.

        On exit, release the connection back to the pool.
        """
        from .fetch import _RequestError, _TimeoutError  # avoid circular import

        clean_exit = False

        try:
            yield
            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        except _TimeoutError as e:
            raise TimeoutError(str(e))
        except _RequestError as e:
            raise HTTPException(str(e))
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now
                if (
                    isinstance(self._response.body, IOBase)
                    and not self._response.body.closed
                ):
                    self._response.body.close()
                # release the connection back to the pool
                self.release_conn()
            else:
                # If we have read everything from the response stream,
                # return the connection back to the pool.
                if (
                    isinstance(self._response.body, IOBase)
                    and self._response.body.closed
                ):
                    self.release_conn()
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/pyopenssl.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/pyopenssl.py"
new file mode 100644
index 0000000..8e05d3d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/pyopenssl.py"
@@ -0,0 +1,564 @@
+"""
+Module for using pyOpenSSL as a TLS backend. This module was relevant before
+the standard library ``ssl`` module supported SNI, but now that we've dropped
+support for Python 2.7 all relevant Python versions support SNI so
+**this module is no longer recommended**.
+
+This needs the following packages installed:
+
+* `pyOpenSSL`_ (tested with 16.0.0)
+* `cryptography`_ (minimum 1.3.4, from pyopenssl)
+* `idna`_ (minimum 2.0)
+
+However, pyOpenSSL depends on cryptography, so while we use all three directly here we
+end up having relatively few packages required.
+
+You can install them with the following command:
+
+.. code-block:: bash
+
+ $ python -m pip install pyopenssl cryptography idna
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this:
+
+.. code-block:: python
+
+ try:
+ import urllib3.contrib.pyopenssl
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ except ImportError:
+ pass
+
+.. _pyopenssl: https://www.pyopenssl.org
+.. _cryptography: https://cryptography.io
+.. _idna: https://github.com/kjd/idna
+"""
+
+from __future__ import annotations
+
+import OpenSSL.SSL # type: ignore[import-not-found]
+from cryptography import x509
+
try:
    from cryptography.x509 import UnsupportedExtension  # type: ignore[attr-defined]
except ImportError:
    # UnsupportedExtension is gone in cryptography >= 2.1.0
    # Define a placeholder so except-clauses can always reference the name.
    class UnsupportedExtension(Exception):  # type: ignore[no-redef]
        pass
+
+
+import logging
+import ssl
+import typing
+from io import BytesIO
+from socket import socket as socket_cls
+from socket import timeout
+
+from .. import util
+
+if typing.TYPE_CHECKING:
+ from OpenSSL.crypto import X509 # type: ignore[import-not-found]
+
+
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions: dict[int, int] = {
    util.ssl_.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,  # type: ignore[attr-defined]
    util.ssl_.PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,  # type: ignore[attr-defined]
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

# Only register TLSv1.1/1.2 when both the stdlib and the pyOpenSSL build
# expose the corresponding constants.
if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD

if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD


# Translate stdlib CERT_* verification modes to OpenSSL VERIFY_* flags.
_stdlib_to_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
    + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
_openssl_to_stdlib_verify = {v: k for k, v in _stdlib_to_openssl_verify.items()}

# The SSLvX values are the most likely to be missing in the future
# but we check them all just to be sure.
_OP_NO_SSLv2_OR_SSLv3: int = getattr(OpenSSL.SSL, "OP_NO_SSLv2", 0) | getattr(
    OpenSSL.SSL, "OP_NO_SSLv3", 0
)
_OP_NO_TLSv1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1", 0)
_OP_NO_TLSv1_1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_1", 0)
_OP_NO_TLSv1_2: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_2", 0)
_OP_NO_TLSv1_3: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_3", 0)

# For a requested *minimum* TLS version, the OP_NO_* flags that disable
# every protocol below it.
_openssl_to_ssl_minimum_version: dict[int, int] = {
    ssl.TLSVersion.MINIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3,
    ssl.TLSVersion.TLSv1: _OP_NO_SSLv2_OR_SSLv3,
    ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1,
    ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1,
    ssl.TLSVersion.TLSv1_3: (
        _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2
    ),
    ssl.TLSVersion.MAXIMUM_SUPPORTED: (
        _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2
    ),
}
# For a requested *maximum* TLS version, the OP_NO_* flags that disable
# every protocol above it.
_openssl_to_ssl_maximum_version: dict[int, int] = {
    ssl.TLSVersion.MINIMUM_SUPPORTED: (
        _OP_NO_SSLv2_OR_SSLv3
        | _OP_NO_TLSv1
        | _OP_NO_TLSv1_1
        | _OP_NO_TLSv1_2
        | _OP_NO_TLSv1_3
    ),
    ssl.TLSVersion.TLSv1: (
        _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3
    ),
    ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3,
    ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_3,
    ssl.TLSVersion.TLSv1_3: _OP_NO_SSLv2_OR_SSLv3,
    ssl.TLSVersion.MAXIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3,
}

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

# Keep a reference to the stdlib SSLContext so extract_from_urllib3() can
# restore it after inject_into_urllib3().
orig_util_SSLContext = util.ssl_.SSLContext


log = logging.getLogger(__name__)
+
+
def inject_into_urllib3() -> None:
    "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."

    _validate_dependencies_met()

    # Swap the SSLContext used by urllib3 (both the top-level alias and
    # util.ssl_) and flag that the pyOpenSSL backend is active.
    util.SSLContext = PyOpenSSLContext  # type: ignore[assignment]
    util.ssl_.SSLContext = PyOpenSSLContext  # type: ignore[assignment]
    util.IS_PYOPENSSL = True
    util.ssl_.IS_PYOPENSSL = True
+
+
def extract_from_urllib3() -> None:
    "Undo monkey-patching by :func:`inject_into_urllib3`."

    # Restore the stdlib SSLContext captured at import time and clear flags.
    util.SSLContext = orig_util_SSLContext
    util.ssl_.SSLContext = orig_util_SSLContext
    util.IS_PYOPENSSL = False
    util.ssl_.IS_PYOPENSSL = False
+
+
def _validate_dependencies_met() -> None:
    """
    Verifies that PyOpenSSL's package-level dependencies have been met.
    Throws `ImportError` if they are not met.
    """
    # Method added in `cryptography==1.1`; not available in older versions
    from cryptography.x509.extensions import Extensions

    if getattr(Extensions, "get_extension_for_class", None) is None:
        raise ImportError(
            "'cryptography' module missing required functionality. "
            "Try upgrading to v1.3.4 or newer."
        )

    # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
    # attribute is only present on those versions.
    from OpenSSL.crypto import X509

    # Named `cert` rather than `x509` to avoid shadowing the module-level
    # `from cryptography import x509` import.
    cert = X509()
    if getattr(cert, "_x509", None) is None:
        raise ImportError(
            "'pyOpenSSL' module missing required functionality. "
            "Try upgrading to v0.14 or newer."
        )
+
+
+def _dnsname_to_stdlib(name: str) -> str | None:
+ """
+ Converts a dNSName SubjectAlternativeName field to the form used by the
+ standard library on the given Python version.
+
+ Cryptography produces a dNSName as a unicode string that was idna-decoded
+ from ASCII bytes. We need to idna-encode that string to get it back, and
+ then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
+ uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
+
+ If the name cannot be idna-encoded then we return None signalling that
+ the name given should be skipped.
+ """
+
+ def idna_encode(name: str) -> bytes | None:
+ """
+ Borrowed wholesale from the Python Cryptography Project. It turns out
+ that we can't just safely call `idna.encode`: it can explode for
+ wildcard names. This avoids that problem.
+ """
+ import idna
+
+ try:
+ for prefix in ["*.", "."]:
+ if name.startswith(prefix):
+ name = name[len(prefix) :]
+ return prefix.encode("ascii") + idna.encode(name)
+ return idna.encode(name)
+ except idna.core.IDNAError:
+ return None
+
+ # Don't send IPv6 addresses through the IDNA encoder.
+ if ":" in name:
+ return name
+
+ encoded_name = idna_encode(name)
+ if encoded_name is None:
+ return None
+ return encoded_name.decode("utf-8")
+
+
def get_subj_alt_name(peer_cert: X509) -> list[tuple[str, str]]:
    """
    Given an PyOpenSSL certificate, provides all the subject alternative names.
    """
    # Convert to a cryptography certificate so its extensions API is usable.
    cert = peer_cert.to_cryptography()

    # We want to find the SAN extension. Ask Cryptography to locate it (it's
    # faster than looping in Python)
    try:
        ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
    except x509.ExtensionNotFound:
        # No such extension, return the empty list.
        return []
    except (
        x509.DuplicateExtension,
        UnsupportedExtension,
        x509.UnsupportedGeneralNameType,
        UnicodeError,
    ) as e:
        # A problem has been found with the quality of the certificate. Assume
        # no SAN field is present.
        log.warning(
            "A problem was encountered with the certificate that prevented "
            "urllib3 from finding the SubjectAlternativeName field. This can "
            "affect certificate validation. The error was %s",
            e,
        )
        return []

    # We want to return dNSName and iPAddress fields. We need to cast the IPs
    # back to strings because the match_hostname function wants them as
    # strings.
    # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
    # decoded. This is pretty frustrating, but that's what the standard library
    # does with certificates, and so we need to attempt to do the same.
    # We also want to skip over names which cannot be idna encoded.
    names = [
        ("DNS", name)
        for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
        if name is not None
    ]
    names.extend(
        ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
    )

    return names
+
+
+class WrappedSocket:
+ """API-compatibility wrapper for Python OpenSSL's Connection-class."""
+
+ def __init__(
+ self,
+ connection: OpenSSL.SSL.Connection,
+ socket: socket_cls,
+ suppress_ragged_eofs: bool = True,
+ ) -> None:
+ self.connection = connection
+ self.socket = socket
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self._io_refs = 0
+ self._closed = False
+
+ def fileno(self) -> int:
+ return self.socket.fileno()
+
+ # Copy-pasted from Python 3.5 source code
+ def _decref_socketios(self) -> None:
+ if self._io_refs > 0:
+ self._io_refs -= 1
+ if self._closed:
+ self.close()
+
+ def recv(self, *args: typing.Any, **kwargs: typing.Any) -> bytes:
+ try:
+ data = self.connection.recv(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ return b""
+ else:
+ raise OSError(e.args[0], str(e)) from e
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b""
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError as e:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out") from e
+ else:
+ return self.recv(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError(f"read error: {e!r}") from e
+ else:
+ return data # type: ignore[no-any-return]
+
+ def recv_into(self, *args: typing.Any, **kwargs: typing.Any) -> int:
+ try:
+ return self.connection.recv_into(*args, **kwargs) # type: ignore[no-any-return]
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ return 0
+ else:
+ raise OSError(e.args[0], str(e)) from e
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return 0
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError as e:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out") from e
+ else:
+ return self.recv_into(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError(f"read error: {e!r}") from e
+
    def settimeout(self, timeout: float) -> None:
        # NOTE(review): the parameter deliberately mirrors the stdlib socket
        # API, but it shadows the module-level ``timeout`` exception name
        # inside this method body.
        return self.socket.settimeout(timeout)
+
    def _send_until_done(self, data: bytes) -> int:
        """Send ``data``, retrying on WantWriteError until the write succeeds;
        returns the number of bytes actually written (may be partial)."""
        while True:
            try:
                return self.connection.send(data)  # type: ignore[no-any-return]
            except OpenSSL.SSL.WantWriteError as e:
                # Socket not writable yet: wait up to the socket timeout.
                if not util.wait_for_write(self.socket, self.socket.gettimeout()):
                    raise timeout() from e
                continue
            except OpenSSL.SSL.SysCallError as e:
                raise OSError(e.args[0], str(e)) from e
+
    def sendall(self, data: bytes) -> None:
        # Write in SSL_WRITE_BLOCKSIZE chunks; partial sends are handled by
        # advancing total_sent by whatever _send_until_done reports.
        total_sent = 0
        while total_sent < len(data):
            sent = self._send_until_done(
                data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
            )
            total_sent += sent
+
    def shutdown(self, how: int) -> None:
        # ``how`` is accepted for socket-API compatibility but ignored:
        # PyOpenSSL's shutdown() takes no direction argument.
        try:
            self.connection.shutdown()
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"shutdown error: {e!r}") from e
+
    def close(self) -> None:
        # Mirror socket.close(): mark closed, but defer the real close until
        # all makefile() file objects have been released (_io_refs == 0).
        self._closed = True
        if self._io_refs <= 0:
            self._real_close()
+
    def _real_close(self) -> None:
        try:
            return self.connection.close()  # type: ignore[no-any-return]
        except OpenSSL.SSL.Error:
            # Closing an already-torn-down connection is not an error.
            return
+
    def getpeercert(
        self, binary_form: bool = False
    ) -> dict[str, list[typing.Any]] | None:
        """Return the peer certificate: DER bytes when ``binary_form`` is true,
        otherwise a minimal stdlib-ssl-shaped dict containing only the subject
        commonName and the subjectAltName entries."""
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509  # type: ignore[no-any-return]

        if binary_form:
            return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)  # type: ignore[no-any-return]

        return {
            "subject": ((("commonName", x509.get_subject().CN),),),  # type: ignore[dict-item]
            "subjectAltName": get_subj_alt_name(x509),
        }
+
    def version(self) -> str:
        # Protocol version name, e.g. "TLSv1.3"; mirrors ssl.SSLSocket.version().
        return self.connection.get_protocol_version_name()  # type: ignore[no-any-return]
+
    def selected_alpn_protocol(self) -> str | None:
        # Negotiated ALPN protocol (e.g. "h2"), or None if none was agreed.
        alpn_proto = self.connection.get_alpn_proto_negotiated()
        return alpn_proto.decode() if alpn_proto else None
+
+
# Reuse the stdlib socket's makefile(); it only relies on recv/recv_into and
# the _io_refs/_closed bookkeeping that WrappedSocket implements above.
WrappedSocket.makefile = socket_cls.makefile  # type: ignore[attr-defined]
+
+
class PyOpenSSLContext:
    """
    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
    for translating the interface of the standard library ``SSLContext`` object
    to calls into PyOpenSSL.
    """

    def __init__(self, protocol: int) -> None:
        # Map the stdlib protocol constant onto the PyOpenSSL method constant.
        self.protocol = _openssl_versions[protocol]
        self._ctx = OpenSSL.SSL.Context(self.protocol)
        self._options = 0
        self.check_hostname = False
        self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED
        self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED
        self._verify_flags: int = ssl.VERIFY_X509_TRUSTED_FIRST

    @property
    def options(self) -> int:
        return self._options

    @options.setter
    def options(self, value: int) -> None:
        # Options are OR-ed with the min/max TLS-version bits, so re-apply
        # the whole set whenever they change.
        self._options = value
        self._set_ctx_options()

    @property
    def verify_flags(self) -> int:
        return self._verify_flags

    @verify_flags.setter
    def verify_flags(self, value: int) -> None:
        self._verify_flags = value
        self._ctx.get_cert_store().set_flags(self._verify_flags)

    @property
    def verify_mode(self) -> int:
        # Translate back from the OpenSSL constant to the stdlib one.
        return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]

    @verify_mode.setter
    def verify_mode(self, value: ssl.VerifyMode) -> None:
        self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)

    def set_default_verify_paths(self) -> None:
        self._ctx.set_default_verify_paths()

    def set_ciphers(self, ciphers: bytes | str) -> None:
        # PyOpenSSL expects the cipher string as bytes.
        if isinstance(ciphers, str):
            ciphers = ciphers.encode("utf-8")
        self._ctx.set_cipher_list(ciphers)

    def load_verify_locations(
        self,
        cafile: str | None = None,
        capath: str | None = None,
        cadata: bytes | None = None,
    ) -> None:
        """Load CA certificates from a file, a directory and/or an in-memory
        blob; any PyOpenSSL failure is re-raised as ``ssl.SSLError``."""
        if cafile is not None:
            cafile = cafile.encode("utf-8")  # type: ignore[assignment]
        if capath is not None:
            capath = capath.encode("utf-8")  # type: ignore[assignment]
        try:
            self._ctx.load_verify_locations(cafile, capath)
            if cadata is not None:
                # PyOpenSSL reads in-memory CA data from a file-like object.
                self._ctx.load_verify_locations(BytesIO(cadata))
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"unable to load trusted certificates: {e!r}") from e

    def load_cert_chain(
        self,
        certfile: str,
        keyfile: str | None = None,
        password: str | None = None,
    ) -> None:
        """Load a client certificate chain and its (optionally encrypted) key;
        ``keyfile`` defaults to ``certfile`` when omitted."""
        try:
            self._ctx.use_certificate_chain_file(certfile)
            if password is not None:
                if not isinstance(password, bytes):
                    password = password.encode("utf-8")  # type: ignore[assignment]
                # Supply the key passphrase via callback.
                self._ctx.set_passwd_cb(lambda *_: password)
            self._ctx.use_privatekey_file(keyfile or certfile)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError(f"Unable to load certificate chain: {e!r}") from e

    def set_alpn_protocols(self, protocols: list[bytes | str]) -> None:
        # ALPN protocol names must be ASCII bytes.
        protocols = [util.util.to_bytes(p, "ascii") for p in protocols]
        return self._ctx.set_alpn_protos(protocols)  # type: ignore[no-any-return]

    def wrap_socket(
        self,
        sock: socket_cls,
        server_side: bool = False,
        do_handshake_on_connect: bool = True,
        suppress_ragged_eofs: bool = True,
        server_hostname: bytes | str | None = None,
    ) -> WrappedSocket:
        """Run the client-side TLS handshake over ``sock`` and return a
        :class:`WrappedSocket`; raises ``ssl.SSLError`` on handshake failure."""
        cnx = OpenSSL.SSL.Connection(self._ctx, sock)

        # If server_hostname is an IP, don't use it for SNI, per RFC6066 Section 3
        if server_hostname and not util.ssl_.is_ipaddress(server_hostname):
            if isinstance(server_hostname, str):
                server_hostname = server_hostname.encode("utf-8")
            cnx.set_tlsext_host_name(server_hostname)

        cnx.set_connect_state()

        while True:
            try:
                cnx.do_handshake()
            except OpenSSL.SSL.WantReadError as e:
                # Handshake needs more data; honour the socket timeout.
                if not util.wait_for_read(sock, sock.gettimeout()):
                    raise timeout("select timed out") from e
                continue
            except OpenSSL.SSL.Error as e:
                raise ssl.SSLError(f"bad handshake: {e!r}") from e
            break

        return WrappedSocket(cnx, sock)

    def _set_ctx_options(self) -> None:
        # Fold the user-requested options together with the option bits that
        # enforce the configured minimum/maximum TLS versions.
        self._ctx.set_options(
            self._options
            | _openssl_to_ssl_minimum_version[self._minimum_version]
            | _openssl_to_ssl_maximum_version[self._maximum_version]
        )

    @property
    def minimum_version(self) -> int:
        return self._minimum_version

    @minimum_version.setter
    def minimum_version(self, minimum_version: int) -> None:
        self._minimum_version = minimum_version
        self._set_ctx_options()

    @property
    def maximum_version(self) -> int:
        return self._maximum_version

    @maximum_version.setter
    def maximum_version(self, maximum_version: int) -> None:
        self._maximum_version = maximum_version
        self._set_ctx_options()
+
+
def _verify_callback(
    cnx: OpenSSL.SSL.Connection,
    x509: X509,
    err_no: int,
    err_depth: int,
    return_code: int,
) -> bool:
    # OpenSSL certificate-verification callback: accept the chain link iff
    # OpenSSL itself reported no error (err_no == 0).
    return err_no == 0
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/socks.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/socks.py"
new file mode 100644
index 0000000..e3239b5
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/contrib/socks.py"
@@ -0,0 +1,228 @@
+"""
+This module contains provisional support for SOCKS proxies from within
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+SOCKS5. To enable its functionality, either install PySocks or install this
+module with the ``socks`` extra.
+
+The SOCKS implementation supports the full range of urllib3 features. It also
+supports the following SOCKS features:
+
+- SOCKS4A (``proxy_url='socks4a://...``)
+- SOCKS4 (``proxy_url='socks4://...``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...``)
+- Usernames and passwords for the SOCKS proxy
+
+.. note::
+ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+ your ``proxy_url`` to ensure that DNS resolution is done from the remote
+ server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request:
+
+.. code-block:: python
+
+ proxy_url="socks4a://<userid>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy:
+
+.. code-block:: python
+
+ proxy_url="socks5h://<username>:<password>@proxy-host"
+
+"""
+
+from __future__ import annotations
+
+try:
+ import socks # type: ignore[import-untyped]
+except ImportError:
+ import warnings
+
+ from ..exceptions import DependencyWarning
+
+ warnings.warn(
+ (
+ "SOCKS support in urllib3 requires the installation of optional "
+ "dependencies: specifically, PySocks. For more information, see "
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html#socks-proxies"
+ ),
+ DependencyWarning,
+ )
+ raise
+
+import typing
+from socket import timeout as SocketTimeout
+
+from ..connection import HTTPConnection, HTTPSConnection
+from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
+
+try:
+ import ssl
+except ImportError:
+ ssl = None # type: ignore[assignment]
+
+
class _TYPE_SOCKS_OPTIONS(typing.TypedDict):
    # Keyword payload threaded through connection_pool_kw to SOCKSConnection.
    socks_version: int
    proxy_host: str | None
    proxy_port: int | None  # populated from parse_url(...).port below
    username: str | None
    password: str | None
    rdns: bool
+
+
class SOCKSConnection(HTTPConnection):
    """
    A plain-text HTTP connection that connects via a SOCKS proxy.
    """

    def __init__(
        self,
        _socks_options: _TYPE_SOCKS_OPTIONS,
        *args: typing.Any,
        **kwargs: typing.Any,
    ) -> None:
        # Proxy parameters are injected per-pool by SOCKSProxyManager via
        # connection_pool_kw (see below).
        self._socks_options = _socks_options
        super().__init__(*args, **kwargs)

    def _new_conn(self) -> socks.socksocket:
        """
        Establish a new connection via the SOCKS proxy.
        """
        extra_kw: dict[str, typing.Any] = {}
        if self.source_address:
            extra_kw["source_address"] = self.source_address

        if self.socket_options:
            extra_kw["socket_options"] = self.socket_options

        try:
            conn = socks.create_connection(
                (self.host, self.port),
                proxy_type=self._socks_options["socks_version"],
                proxy_addr=self._socks_options["proxy_host"],
                proxy_port=self._socks_options["proxy_port"],
                proxy_username=self._socks_options["username"],
                proxy_password=self._socks_options["password"],
                proxy_rdns=self._socks_options["rdns"],
                timeout=self.timeout,
                **extra_kw,
            )

        except SocketTimeout as e:
            raise ConnectTimeoutError(
                self,
                f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
            ) from e

        except socks.ProxyError as e:
            # This is fragile as hell, but it seems to be the only way to raise
            # useful errors here.
            if e.socket_err:
                error = e.socket_err
                if isinstance(error, SocketTimeout):
                    raise ConnectTimeoutError(
                        self,
                        f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
                    ) from e
                else:
                    # Adding `from e` messes with coverage somehow, so it's omitted.
                    # See #2386.
                    raise NewConnectionError(
                        self, f"Failed to establish a new connection: {error}"
                    )
            else:
                raise NewConnectionError(
                    self, f"Failed to establish a new connection: {e}"
                ) from e

        except OSError as e:  # Defensive: PySocks should catch all these.
            raise NewConnectionError(
                self, f"Failed to establish a new connection: {e}"
            ) from e

        return conn
+
+
+# We don't need to duplicate the Verified/Unverified distinction from
+# urllib3/connection.py here because the HTTPSConnection will already have been
+# correctly set to either the Verified or Unverified form by that module. This
+# means the SOCKSHTTPSConnection will automatically be the correct type.
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
    # TLS-over-SOCKS: socket setup comes from SOCKSConnection._new_conn,
    # TLS handling from HTTPSConnection.
    pass
+
+
class SOCKSHTTPConnectionPool(HTTPConnectionPool):
    # Plain-HTTP pool whose connections dial through the SOCKS proxy.
    ConnectionCls = SOCKSConnection
+
+
class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
    # HTTPS pool whose connections dial through the SOCKS proxy.
    ConnectionCls = SOCKSHTTPSConnection
+
+
class SOCKSProxyManager(PoolManager):
    """
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.
    """

    pool_classes_by_scheme = {
        "http": SOCKSHTTPConnectionPool,
        "https": SOCKSHTTPSConnectionPool,
    }

    def __init__(
        self,
        proxy_url: str,
        username: str | None = None,
        password: str | None = None,
        num_pools: int = 10,
        headers: typing.Mapping[str, str] | None = None,
        **connection_pool_kw: typing.Any,
    ):
        parsed = parse_url(proxy_url)

        # Explicit username/password arguments win over credentials embedded
        # in the proxy URL.
        if username is None and password is None and parsed.auth is not None:
            split = parsed.auth.split(":")
            if len(split) == 2:
                username, password = split
        # Scheme selects the SOCKS version; the trailing "a"/"h" variants
        # request remote (proxy-side) DNS resolution.
        if parsed.scheme == "socks5":
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = False
        elif parsed.scheme == "socks5h":
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = True
        elif parsed.scheme == "socks4":
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = False
        elif parsed.scheme == "socks4a":
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = True
        else:
            raise ValueError(f"Unable to determine SOCKS version from {proxy_url}")

        self.proxy_url = proxy_url

        socks_options = {
            "socks_version": socks_version,
            "proxy_host": parsed.host,
            "proxy_port": parsed.port,
            "username": username,
            "password": password,
            "rdns": rdns,
        }
        # Forwarded to every SOCKSConnection created by the pools above.
        connection_pool_kw["_socks_options"] = socks_options

        super().__init__(num_pools, headers, **connection_pool_kw)

        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/exceptions.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/exceptions.py"
new file mode 100644
index 0000000..58723fa
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/exceptions.py"
@@ -0,0 +1,335 @@
+from __future__ import annotations
+
+import socket
+import typing
+import warnings
+from email.errors import MessageDefect
+from http.client import IncompleteRead as httplib_IncompleteRead
+
+if typing.TYPE_CHECKING:
+ from .connection import HTTPConnection
+ from .connectionpool import ConnectionPool
+ from .response import HTTPResponse
+ from .util.retry import Retry
+
+# Base Exceptions
+
+
class HTTPError(Exception):
    """Base exception used by this module."""


class HTTPWarning(Warning):
    """Base warning used by this module."""


# Shape returned by the __reduce__ implementations below: (callable, args),
# used to make these exceptions picklable despite unpicklable attributes.
_TYPE_REDUCE_RESULT = tuple[typing.Callable[..., object], tuple[object, ...]]


class PoolError(HTTPError):
    """Base exception for errors caused within a pool."""

    def __init__(self, pool: ConnectionPool, message: str) -> None:
        self.pool = pool
        self._message = message
        super().__init__(f"{pool}: {message}")

    def __reduce__(self) -> _TYPE_REDUCE_RESULT:
        # For pickling purposes: the pool is dropped and rebuilt as None.
        return self.__class__, (None, self._message)


class RequestError(PoolError):
    """Base exception for PoolErrors that have associated URLs."""

    def __init__(self, pool: ConnectionPool, url: str | None, message: str) -> None:
        self.url = url
        super().__init__(pool, message)

    def __reduce__(self) -> _TYPE_REDUCE_RESULT:
        # For pickling purposes.
        return self.__class__, (None, self.url, self._message)
+
+
class SSLError(HTTPError):
    """Raised when SSL certificate fails in an HTTPS connection."""
    # NOTE: this is urllib3's own SSLError, distinct from stdlib ssl.SSLError.


class ProxyError(HTTPError):
    """Raised when the connection to a proxy fails."""

    # The original error is also available as __cause__.
    original_error: Exception

    def __init__(self, message: str, error: Exception) -> None:
        super().__init__(message, error)
        self.original_error = error


class DecodeError(HTTPError):
    """Raised when automatic decoding based on Content-Type fails."""


class ProtocolError(HTTPError):
    """Raised when something unexpected happens mid-request/response."""


#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+
class MaxRetryError(RequestError):
    """Raised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param str url: The requested Url
    :param reason: The underlying error
    :type reason: :class:`Exception`

    """

    def __init__(
        self, pool: ConnectionPool, url: str | None, reason: Exception | None = None
    ) -> None:
        self.reason = reason

        message = f"Max retries exceeded with url: {url} (Caused by {reason!r})"

        super().__init__(pool, url, message)

    def __reduce__(self) -> _TYPE_REDUCE_RESULT:
        # For pickling purposes: drop the unpicklable pool, keep url/reason.
        return self.__class__, (None, self.url, self.reason)


class HostChangedError(RequestError):
    """Raised when an existing pool gets a request for a foreign host."""

    def __init__(
        self, pool: ConnectionPool, url: str, retries: Retry | int = 3
    ) -> None:
        message = f"Tried to open a foreign host with url: {url}"
        super().__init__(pool, url, message)
        self.retries = retries
+
+
class TimeoutStateError(HTTPError):
    """Raised when passing an invalid state to a timeout"""


class TimeoutError(HTTPError):
    """Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    """
    # NOTE: shadows the builtin TimeoutError within this module's namespace.


class ReadTimeoutError(TimeoutError, RequestError):
    """Raised when a socket timeout occurs while receiving data from a server"""


# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    """Raised when a socket timeout occurs while connecting to a server"""


class NewConnectionError(ConnectTimeoutError, HTTPError):
    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""

    def __init__(self, conn: HTTPConnection, message: str) -> None:
        self.conn = conn
        self._message = message
        super().__init__(f"{conn}: {message}")

    def __reduce__(self) -> _TYPE_REDUCE_RESULT:
        # For pickling purposes.
        return self.__class__, (None, self._message)

    @property
    def pool(self) -> HTTPConnection:
        # Deprecated accessor kept for backwards compatibility; use .conn.
        warnings.warn(
            "The 'pool' property is deprecated and will be removed "
            "in urllib3 v2.1.0. Use 'conn' instead.",
            DeprecationWarning,
            stacklevel=2,
        )

        return self.conn


class NameResolutionError(NewConnectionError):
    """Raised when host name resolution fails."""

    def __init__(self, host: str, conn: HTTPConnection, reason: socket.gaierror):
        message = f"Failed to resolve '{host}' ({reason})"
        self._host = host
        self._reason = reason
        super().__init__(conn, message)

    def __reduce__(self) -> _TYPE_REDUCE_RESULT:
        # For pickling purposes: the connection is dropped (rebuilt as None).
        return self.__class__, (self._host, None, self._reason)
+
+
class EmptyPoolError(PoolError):
    """Raised when a pool runs out of connections and no more are allowed."""


class FullPoolError(PoolError):
    """Raised when we try to add a connection to a full pool in blocking mode."""


class ClosedPoolError(PoolError):
    """Raised when a request enters a pool after the pool has been closed."""


class LocationValueError(ValueError, HTTPError):
    """Raised when there is something wrong with a given URL input."""


class LocationParseError(LocationValueError):
    """Raised when get_host or similar fails to parse the URL input."""

    def __init__(self, location: str) -> None:
        message = f"Failed to parse: {location}"
        super().__init__(message)

        self.location = location


class URLSchemeUnknown(LocationValueError):
    """Raised when a URL input has an unsupported scheme."""

    def __init__(self, scheme: str):
        message = f"Not supported URL scheme {scheme}"
        super().__init__(message)

        self.scheme = scheme


class ResponseError(HTTPError):
    """Used as a container for an error reason supplied in a MaxRetryError."""

    # Message templates formatted by the retry machinery.
    GENERIC_ERROR = "too many error responses"
    SPECIFIC_ERROR = "too many {status_code} error responses"


class SecurityWarning(HTTPWarning):
    """Warned when performing security reducing actions"""


class InsecureRequestWarning(SecurityWarning):
    """Warned when making an unverified HTTPS request."""


class NotOpenSSLWarning(SecurityWarning):
    """Warned when using unsupported SSL library"""


class SystemTimeWarning(SecurityWarning):
    """Warned when system time is suspected to be wrong"""


class InsecurePlatformWarning(SecurityWarning):
    """Warned when certain TLS/SSL configuration is not available on a platform."""


class DependencyWarning(HTTPWarning):
    """
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    """
+
+
class ResponseNotChunked(ProtocolError, ValueError):
    """Response needs to be chunked in order to read it as chunks."""


class BodyNotHttplibCompatible(HTTPError):
    """
    Body should be :class:`http.client.HTTPResponse` like
    (have an fp attribute which returns raw chunks) for read_chunked().
    """


class IncompleteRead(HTTPError, httplib_IncompleteRead):
    """
    Response length doesn't match expected Content-Length

    Subclass of :class:`http.client.IncompleteRead` to allow int value
    for ``partial`` to avoid creating large objects on streamed reads.
    """

    # Byte counts rather than the raw payload (unlike the stdlib parent).
    partial: int  # type: ignore[assignment]
    expected: int

    def __init__(self, partial: int, expected: int) -> None:
        # Deliberately does not call the parent __init__, which would store
        # the partial body itself rather than just its length.
        self.partial = partial
        self.expected = expected

    def __repr__(self) -> str:
        return "IncompleteRead(%i bytes read, %i more expected)" % (
            self.partial,
            self.expected,
        )


class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
    """Invalid chunk length in a chunked response."""

    def __init__(self, response: HTTPResponse, length: bytes) -> None:
        self.partial: int = response.tell()  # type: ignore[assignment]
        self.expected: int | None = response.length_remaining
        self.response = response
        self.length = length

    def __repr__(self) -> str:
        return "InvalidChunkLength(got length %r, %i bytes read)" % (
            self.length,
            self.partial,
        )
+
+
class InvalidHeader(HTTPError):
    """The header provided was somehow invalid."""


class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
    """ProxyManager does not support the supplied scheme"""

    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme: str | None) -> None:
        # 'localhost' is here because our URL parser parses
        # localhost:8080 -> scheme=localhost, remove if we fix this.
        if scheme == "localhost":
            scheme = None
        if scheme is None:
            message = "Proxy URL had no scheme, should start with http:// or https://"
        else:
            message = f"Proxy URL had unsupported scheme {scheme}, should use http:// or https://"
        super().__init__(message)


class ProxySchemeUnsupported(ValueError):
    """Fetching HTTPS resources through HTTPS proxies is unsupported"""


class HeaderParsingError(HTTPError):
    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""

    def __init__(
        self, defects: list[MessageDefect], unparsed_data: bytes | str | None
    ) -> None:
        # ``defects`` comes from the stdlib email parser's defect list.
        message = f"{defects or 'Unknown'}, unparsed data: {unparsed_data!r}"
        super().__init__(message)


class UnrewindableBodyError(HTTPError):
    """urllib3 encountered an error when trying to rewind a body"""
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/fields.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/fields.py"
new file mode 100644
index 0000000..97c4730
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/fields.py"
@@ -0,0 +1,341 @@
+from __future__ import annotations
+
+import email.utils
+import mimetypes
+import typing
+
+_TYPE_FIELD_VALUE = typing.Union[str, bytes]
+_TYPE_FIELD_VALUE_TUPLE = typing.Union[
+ _TYPE_FIELD_VALUE,
+ tuple[str, _TYPE_FIELD_VALUE],
+ tuple[str, _TYPE_FIELD_VALUE, str],
+]
+
+
+def guess_content_type(
+ filename: str | None, default: str = "application/octet-stream"
+) -> str:
+ """
+ Guess the "Content-Type" of a file.
+
+ :param filename:
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+ :param default:
+ If no "Content-Type" can be guessed, default to `default`.
+ """
+ if filename:
+ return mimetypes.guess_type(filename)[0] or default
+ return default
+
+
def format_header_param_rfc2231(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    Helper function to format and quote a single header parameter using the
    strategy defined in RFC 2231.

    Particularly useful for header parameters which might contain
    non-ASCII values, like file names. This follows
    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as ``bytes`` or ``str``.
    :returns:
        An RFC-2231-formatted unicode string.

    .. deprecated:: 2.0.0
        Will be removed in urllib3 v2.1.0. This is not valid for
        ``multipart/form-data`` header parameters.
    """
    import warnings

    warnings.warn(
        "'format_header_param_rfc2231' is deprecated and will be "
        "removed in urllib3 v2.1.0. This is not valid for "
        "multipart/form-data header parameters.",
        DeprecationWarning,
        stacklevel=2,
    )

    if isinstance(value, bytes):
        value = value.decode("utf-8")

    if not any(ch in value for ch in '"\\\r\n'):
        # Fast path: plain quoting, used only when the result is pure ASCII.
        result = f'{name}="{value}"'
        try:
            result.encode("ascii")
        except (UnicodeEncodeError, UnicodeDecodeError):
            pass
        else:
            return result

    # Fall back to RFC 2231 extended notation: name*=utf-8''percent-encoded.
    value = email.utils.encode_rfc2231(value, "utf-8")
    value = f"{name}*={value}"

    return value
+
+
def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    Format and quote a single multipart header parameter.

    This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching
    the behavior of current browser and curl versions: values are assumed
    to be UTF-8 and only the ``\\n``, ``\\r``, and ``"`` characters are
    percent encoded.

    .. _WHATWG HTML Standard:
        https://html.spec.whatwg.org/multipage/
        form-control-infrastructure.html#multipart-form-data

    :param name:
        The name of the parameter, an ASCII-only ``str``.
    :param value:
        The value of the parameter, a ``str`` or UTF-8 encoded ``bytes``.
    :returns:
        A string ``name="value"`` with the escaped value.
    """
    text = value.decode("utf-8") if isinstance(value, bytes) else value
    # Single-pass escape of newline, carriage return and double quote.
    escaped = text.translate({10: "%0A", 13: "%0D", 34: "%22"})
    return f'{name}="{escaped}"'
+
+
def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    .. deprecated:: 2.0.0
        Renamed to :func:`format_multipart_header_param`. Will be
        removed in urllib3 v2.1.0.
    """
    import warnings

    warnings.warn(
        "'format_header_param_html5' has been renamed to "
        "'format_multipart_header_param'. The old name will be "
        "removed in urllib3 v2.1.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    # Warn, then delegate to the renamed implementation unchanged.
    return format_multipart_header_param(name, value)
+
+
def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str:
    """
    .. deprecated:: 2.0.0
        Renamed to :func:`format_multipart_header_param`. Will be
        removed in urllib3 v2.1.0.
    """
    import warnings

    warnings.warn(
        "'format_header_param' has been renamed to "
        "'format_multipart_header_param'. The old name will be "
        "removed in urllib3 v2.1.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    # Warn, then delegate to the renamed implementation unchanged.
    return format_multipart_header_param(name, value)
+
+
class RequestField:
    """
    A data container for request body parameters.

    :param name:
        The name of this request field. Must be unicode.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field. Must be unicode.
    :param headers:
        An optional dict-like object of headers to initially use for the field.

    .. versionchanged:: 2.0.0
        The ``header_formatter`` parameter is deprecated and will
        be removed in urllib3 v2.1.0.
    """

    def __init__(
        self,
        name: str,
        data: _TYPE_FIELD_VALUE,
        filename: str | None = None,
        headers: typing.Mapping[str, str] | None = None,
        header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None,
    ):
        self._name = name
        self._filename = filename
        self.data = data
        self.headers: dict[str, str | None] = {}
        if headers:
            # Copy so later mutation of the field's headers cannot affect the
            # caller's mapping (and vice versa).
            self.headers = dict(headers)

        if header_formatter is not None:
            import warnings

            warnings.warn(
                "The 'header_formatter' parameter is deprecated and "
                "will be removed in urllib3 v2.1.0.",
                DeprecationWarning,
                stacklevel=2,
            )
            self.header_formatter = header_formatter
        else:
            self.header_formatter = format_multipart_header_param

    @classmethod
    def from_tuples(
        cls,
        fieldname: str,
        value: _TYPE_FIELD_VALUE_TUPLE,
        header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None,
    ) -> RequestField:
        """
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        """
        filename: str | None
        content_type: str | None
        data: _TYPE_FIELD_VALUE

        if isinstance(value, tuple):
            # 3-tuple carries an explicit MIME type; 2-tuple means guess it
            # from the filename.
            if len(value) == 3:
                filename, data, content_type = value
            else:
                filename, data = value
                content_type = guess_content_type(filename)
        else:
            filename = None
            content_type = None
            data = value

        request_param = cls(
            fieldname, data, filename=filename, header_formatter=header_formatter
        )
        request_param.make_multipart(content_type=content_type)

        return request_param

    def _render_part(self, name: str, value: _TYPE_FIELD_VALUE) -> str:
        """
        Override this method to change how each multipart header
        parameter is formatted. By default, this calls
        :func:`format_multipart_header_param`.

        :param name:
            The name of the parameter, an ASCII-only ``str``.
        :param value:
            The value of the parameter, a ``str`` or UTF-8 encoded
            ``bytes``.

        :meta public:
        """
        return self.header_formatter(name, value)

    def _render_parts(
        self,
        header_parts: (
            dict[str, _TYPE_FIELD_VALUE | None]
            | typing.Sequence[tuple[str, _TYPE_FIELD_VALUE | None]]
        ),
    ) -> str:
        """
        Helper function to format and quote a single header.

        Useful for single headers that are composed of multiple items. E.g.,
        'Content-Disposition' fields.

        :param header_parts:
            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
            as `k1="v1"; k2="v2"; ...`.
        """
        iterable: typing.Iterable[tuple[str, _TYPE_FIELD_VALUE | None]]

        parts = []
        if isinstance(header_parts, dict):
            iterable = header_parts.items()
        else:
            iterable = header_parts

        # Skip parts with a None value (e.g. a field without a filename).
        for name, value in iterable:
            if value is not None:
                parts.append(self._render_part(name, value))

        return "; ".join(parts)

    def render_headers(self) -> str:
        """
        Renders the headers for this request field.
        """
        lines = []

        # Emit the well-known headers first, in a fixed order...
        sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
        for sort_key in sort_keys:
            if self.headers.get(sort_key, False):
                lines.append(f"{sort_key}: {self.headers[sort_key]}")

        # ...then any remaining custom headers in insertion order.
        for header_name, header_value in self.headers.items():
            if header_name not in sort_keys:
                if header_value:
                    lines.append(f"{header_name}: {header_value}")

        # Trailing "\r\n" yields the blank line terminating the header block.
        lines.append("\r\n")
        return "\r\n".join(lines)

    def make_multipart(
        self,
        content_disposition: str | None = None,
        content_type: str | None = None,
        content_location: str | None = None,
    ) -> None:
        """
        Makes this request field into a multipart request field.

        This method overrides "Content-Disposition", "Content-Type" and
        "Content-Location" headers to the request parameter.

        :param content_disposition:
            The 'Content-Disposition' of the request body. Defaults to 'form-data'
        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.

        """
        # Builds e.g. ``form-data; name="field"; filename="file.txt"``; the
        # leading "" makes join() emit the separator before the parts.
        content_disposition = (content_disposition or "form-data") + "; ".join(
            [
                "",
                self._render_parts(
                    (("name", self._name), ("filename", self._filename))
                ),
            ]
        )

        self.headers["Content-Disposition"] = content_disposition
        self.headers["Content-Type"] = content_type
        self.headers["Content-Location"] = content_location
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/filepost.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/filepost.py"
new file mode 100644
index 0000000..14f70b0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/filepost.py"
@@ -0,0 +1,89 @@
+from __future__ import annotations
+
+import binascii
+import codecs
+import os
+import typing
+from io import BytesIO
+
+from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField
+
+writer = codecs.lookup("utf-8")[3]
+
+_TYPE_FIELDS_SEQUENCE = typing.Sequence[
+ typing.Union[tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField]
+]
+_TYPE_FIELDS = typing.Union[
+ _TYPE_FIELDS_SEQUENCE,
+ typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE],
+]
+
+
+def choose_boundary() -> str:
+ """
+ Our embarrassingly-simple replacement for mimetools.choose_boundary.
+ """
+ return binascii.hexlify(os.urandom(16)).decode()
+
+
+def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]:
+ """
+ Iterate over fields.
+
+ Supports list of (k, v) tuples and dicts, and lists of
+ :class:`~urllib3.fields.RequestField`.
+
+ """
+ iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]]
+
+ if isinstance(fields, typing.Mapping):
+ iterable = fields.items()
+ else:
+ iterable = fields
+
+ for field in iterable:
+ if isinstance(field, RequestField):
+ yield field
+ else:
+ yield RequestField.from_tuples(*field)
+
+
+def encode_multipart_formdata(
+ fields: _TYPE_FIELDS, boundary: str | None = None
+) -> tuple[bytes, str]:
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
+
+ :param fields:
+ Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
+ Values are processed by :func:`urllib3.fields.RequestField.from_tuples`.
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+ :func:`urllib3.filepost.choose_boundary`.
+ """
+ body = BytesIO()
+ if boundary is None:
+ boundary = choose_boundary()
+
+ for field in iter_field_objects(fields):
+ body.write(f"--{boundary}\r\n".encode("latin-1"))
+
+ writer(body).write(field.render_headers())
+ data = field.data
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, str):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write(b"\r\n")
+
+ body.write(f"--{boundary}--\r\n".encode("latin-1"))
+
+ content_type = f"multipart/form-data; boundary={boundary}"
+
+ return body.getvalue(), content_type
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/__init__.py"
new file mode 100644
index 0000000..133e1d8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/__init__.py"
@@ -0,0 +1,53 @@
+from __future__ import annotations
+
+from importlib.metadata import version
+
+__all__ = [
+ "inject_into_urllib3",
+ "extract_from_urllib3",
+]
+
+import typing
+
+orig_HTTPSConnection: typing.Any = None
+
+
+def inject_into_urllib3() -> None:
+ # First check if h2 version is valid
+ h2_version = version("h2")
+ if not h2_version.startswith("4."):
+ raise ImportError(
+ "urllib3 v2 supports h2 version 4.x.x, currently "
+ f"the 'h2' module is compiled with {h2_version!r}. "
+ "See: https://github.com/urllib3/urllib3/issues/3290"
+ )
+
+ # Import here to avoid circular dependencies.
+ from .. import connection as urllib3_connection
+ from .. import util as urllib3_util
+ from ..connectionpool import HTTPSConnectionPool
+ from ..util import ssl_ as urllib3_util_ssl
+ from .connection import HTTP2Connection
+
+ global orig_HTTPSConnection
+ orig_HTTPSConnection = urllib3_connection.HTTPSConnection
+
+ HTTPSConnectionPool.ConnectionCls = HTTP2Connection
+ urllib3_connection.HTTPSConnection = HTTP2Connection # type: ignore[misc]
+
+ # TODO: Offer 'http/1.1' as well, but for testing purposes this is handy.
+ urllib3_util.ALPN_PROTOCOLS = ["h2"]
+ urllib3_util_ssl.ALPN_PROTOCOLS = ["h2"]
+
+
+def extract_from_urllib3() -> None:
+ from .. import connection as urllib3_connection
+ from .. import util as urllib3_util
+ from ..connectionpool import HTTPSConnectionPool
+ from ..util import ssl_ as urllib3_util_ssl
+
+ HTTPSConnectionPool.ConnectionCls = orig_HTTPSConnection
+ urllib3_connection.HTTPSConnection = orig_HTTPSConnection # type: ignore[misc]
+
+ urllib3_util.ALPN_PROTOCOLS = ["http/1.1"]
+ urllib3_util_ssl.ALPN_PROTOCOLS = ["http/1.1"]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/connection.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/connection.py"
new file mode 100644
index 0000000..7534b77
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/connection.py"
@@ -0,0 +1,356 @@
+from __future__ import annotations
+
+import logging
+import re
+import threading
+import types
+import typing
+
+import h2.config # type: ignore[import-untyped]
+import h2.connection # type: ignore[import-untyped]
+import h2.events # type: ignore[import-untyped]
+
+from .._base_connection import _TYPE_BODY
+from .._collections import HTTPHeaderDict
+from ..connection import HTTPSConnection, _get_default_user_agent
+from ..exceptions import ConnectionError
+from ..response import BaseHTTPResponse
+
+orig_HTTPSConnection = HTTPSConnection
+
+T = typing.TypeVar("T")
+
+log = logging.getLogger(__name__)
+
+RE_IS_LEGAL_HEADER_NAME = re.compile(rb"^[!#$%&'*+\-.^_`|~0-9a-z]+$")
+RE_IS_ILLEGAL_HEADER_VALUE = re.compile(rb"[\0\x00\x0a\x0d\r\n]|^[ \r\n\t]|[ \r\n\t]$")
+
+
+def _is_legal_header_name(name: bytes) -> bool:
+ """
+ "An implementation that validates fields according to the definitions in Sections
+ 5.1 and 5.5 of [HTTP] only needs an additional check that field names do not
+ include uppercase characters." (https://httpwg.org/specs/rfc9113.html#n-field-validity)
+
+ `http.client._is_legal_header_name` does not validate the field name according to the
+ HTTP 1.1 spec, so we do that here, in addition to checking for uppercase characters.
+
+ This does not allow for the `:` character in the header name, so should not
+ be used to validate pseudo-headers.
+ """
+ return bool(RE_IS_LEGAL_HEADER_NAME.match(name))
+
+
+def _is_illegal_header_value(value: bytes) -> bool:
+ """
+ "A field value MUST NOT contain the zero value (ASCII NUL, 0x00), line feed
+ (ASCII LF, 0x0a), or carriage return (ASCII CR, 0x0d) at any position. A field
+ value MUST NOT start or end with an ASCII whitespace character (ASCII SP or HTAB,
+ 0x20 or 0x09)." (https://httpwg.org/specs/rfc9113.html#n-field-validity)
+ """
+ return bool(RE_IS_ILLEGAL_HEADER_VALUE.search(value))
+
+
+class _LockedObject(typing.Generic[T]):
+ """
+ A wrapper class that hides a specific object behind a lock.
+ The goal here is to provide a simple way to protect access to an object
+ that cannot safely be simultaneously accessed from multiple threads. The
+ intended use of this class is simple: take hold of it with a context
+ manager, which returns the protected object.
+ """
+
+ __slots__ = (
+ "lock",
+ "_obj",
+ )
+
+ def __init__(self, obj: T):
+ self.lock = threading.RLock()
+ self._obj = obj
+
+ def __enter__(self) -> T:
+ self.lock.acquire()
+ return self._obj
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: types.TracebackType | None,
+ ) -> None:
+ self.lock.release()
+
+
+class HTTP2Connection(HTTPSConnection):
+ def __init__(
+ self, host: str, port: int | None = None, **kwargs: typing.Any
+ ) -> None:
+ self._h2_conn = self._new_h2_conn()
+ self._h2_stream: int | None = None
+ self._headers: list[tuple[bytes, bytes]] = []
+
+ if "proxy" in kwargs or "proxy_config" in kwargs: # Defensive:
+ raise NotImplementedError("Proxies aren't supported with HTTP/2")
+
+ super().__init__(host, port, **kwargs)
+
+ if self._tunnel_host is not None:
+ raise NotImplementedError("Tunneling isn't supported with HTTP/2")
+
+ def _new_h2_conn(self) -> _LockedObject[h2.connection.H2Connection]:
+ config = h2.config.H2Configuration(client_side=True)
+ return _LockedObject(h2.connection.H2Connection(config=config))
+
+ def connect(self) -> None:
+ super().connect()
+ with self._h2_conn as conn:
+ conn.initiate_connection()
+ if data_to_send := conn.data_to_send():
+ self.sock.sendall(data_to_send)
+
+ def putrequest( # type: ignore[override]
+ self,
+ method: str,
+ url: str,
+ **kwargs: typing.Any,
+ ) -> None:
+ """putrequest
+ This deviates from the HTTPConnection method signature since we never need to override
+ sending accept-encoding headers or the host header.
+ """
+ if "skip_host" in kwargs:
+ raise NotImplementedError("`skip_host` isn't supported")
+ if "skip_accept_encoding" in kwargs:
+ raise NotImplementedError("`skip_accept_encoding` isn't supported")
+
+ self._request_url = url or "/"
+ self._validate_path(url) # type: ignore[attr-defined]
+
+ if ":" in self.host:
+ authority = f"[{self.host}]:{self.port or 443}"
+ else:
+ authority = f"{self.host}:{self.port or 443}"
+
+ self._headers.append((b":scheme", b"https"))
+ self._headers.append((b":method", method.encode()))
+ self._headers.append((b":authority", authority.encode()))
+ self._headers.append((b":path", url.encode()))
+
+ with self._h2_conn as conn:
+ self._h2_stream = conn.get_next_available_stream_id()
+
+ def putheader(self, header: str | bytes, *values: str | bytes) -> None: # type: ignore[override]
+ # TODO SKIPPABLE_HEADERS from urllib3 are ignored.
+ header = header.encode() if isinstance(header, str) else header
+ header = header.lower() # A lot of upstream code uses capitalized headers.
+ if not _is_legal_header_name(header):
+ raise ValueError(f"Illegal header name {str(header)}")
+
+ for value in values:
+ value = value.encode() if isinstance(value, str) else value
+ if _is_illegal_header_value(value):
+ raise ValueError(f"Illegal header value {str(value)}")
+ self._headers.append((header, value))
+
+ def endheaders(self, message_body: typing.Any = None) -> None: # type: ignore[override]
+ if self._h2_stream is None:
+ raise ConnectionError("Must call `putrequest` first.")
+
+ with self._h2_conn as conn:
+ conn.send_headers(
+ stream_id=self._h2_stream,
+ headers=self._headers,
+ end_stream=(message_body is None),
+ )
+ if data_to_send := conn.data_to_send():
+ self.sock.sendall(data_to_send)
+ self._headers = [] # Reset headers for the next request.
+
+ def send(self, data: typing.Any) -> None:
+ """Send data to the server.
+ `data` can be: `str`, `bytes`, an iterable, or file-like objects
+ that support a .read() method.
+ """
+ if self._h2_stream is None:
+ raise ConnectionError("Must call `putrequest` first.")
+
+ with self._h2_conn as conn:
+ if data_to_send := conn.data_to_send():
+ self.sock.sendall(data_to_send)
+
+ if hasattr(data, "read"): # file-like objects
+ while True:
+ chunk = data.read(self.blocksize)
+ if not chunk:
+ break
+ if isinstance(chunk, str):
+ chunk = chunk.encode()
+ conn.send_data(self._h2_stream, chunk, end_stream=False)
+ if data_to_send := conn.data_to_send():
+ self.sock.sendall(data_to_send)
+ conn.end_stream(self._h2_stream)
+ return
+
+ if isinstance(data, str): # str -> bytes
+ data = data.encode()
+
+ try:
+ if isinstance(data, bytes):
+ conn.send_data(self._h2_stream, data, end_stream=True)
+ if data_to_send := conn.data_to_send():
+ self.sock.sendall(data_to_send)
+ else:
+ for chunk in data:
+ conn.send_data(self._h2_stream, chunk, end_stream=False)
+ if data_to_send := conn.data_to_send():
+ self.sock.sendall(data_to_send)
+ conn.end_stream(self._h2_stream)
+ except TypeError:
+ raise TypeError(
+ "`data` should be str, bytes, iterable, or file. got %r"
+ % type(data)
+ )
+
+ def set_tunnel(
+ self,
+ host: str,
+ port: int | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ scheme: str = "http",
+ ) -> None:
+ raise NotImplementedError(
+ "HTTP/2 does not support setting up a tunnel through a proxy"
+ )
+
+ def getresponse( # type: ignore[override]
+ self,
+ ) -> HTTP2Response:
+ status = None
+ data = bytearray()
+ with self._h2_conn as conn:
+ end_stream = False
+ while not end_stream:
+ # TODO: Arbitrary read value.
+ if received_data := self.sock.recv(65535):
+ events = conn.receive_data(received_data)
+ for event in events:
+ if isinstance(event, h2.events.ResponseReceived):
+ headers = HTTPHeaderDict()
+ for header, value in event.headers:
+ if header == b":status":
+ status = int(value.decode())
+ else:
+ headers.add(
+ header.decode("ascii"), value.decode("ascii")
+ )
+
+ elif isinstance(event, h2.events.DataReceived):
+ data += event.data
+ conn.acknowledge_received_data(
+ event.flow_controlled_length, event.stream_id
+ )
+
+ elif isinstance(event, h2.events.StreamEnded):
+ end_stream = True
+
+ if data_to_send := conn.data_to_send():
+ self.sock.sendall(data_to_send)
+
+ assert status is not None
+ return HTTP2Response(
+ status=status,
+ headers=headers,
+ request_url=self._request_url,
+ data=bytes(data),
+ )
+
+ def request( # type: ignore[override]
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ *,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ **kwargs: typing.Any,
+ ) -> None:
+ """Send an HTTP/2 request"""
+ if "chunked" in kwargs:
+ # TODO this is often present from upstream.
+ # raise NotImplementedError("`chunked` isn't supported with HTTP/2")
+ pass
+
+ if self.sock is not None:
+ self.sock.settimeout(self.timeout)
+
+ self.putrequest(method, url)
+
+ headers = headers or {}
+ for k, v in headers.items():
+ if k.lower() == "transfer-encoding" and v == "chunked":
+ continue
+ else:
+ self.putheader(k, v)
+
+ if b"user-agent" not in dict(self._headers):
+ self.putheader(b"user-agent", _get_default_user_agent())
+
+ if body:
+ self.endheaders(message_body=body)
+ self.send(body)
+ else:
+ self.endheaders()
+
+ def close(self) -> None:
+ with self._h2_conn as conn:
+ try:
+ conn.close_connection()
+ if data := conn.data_to_send():
+ self.sock.sendall(data)
+ except Exception:
+ pass
+
+ # Reset all our HTTP/2 connection state.
+ self._h2_conn = self._new_h2_conn()
+ self._h2_stream = None
+ self._headers = []
+
+ super().close()
+
+
+class HTTP2Response(BaseHTTPResponse):
+ # TODO: This is a woefully incomplete response object, but works for non-streaming.
+ def __init__(
+ self,
+ status: int,
+ headers: HTTPHeaderDict,
+ request_url: str,
+ data: bytes,
+ decode_content: bool = False, # TODO: support decoding
+ ) -> None:
+ super().__init__(
+ status=status,
+ headers=headers,
+ # Following CPython, we map HTTP versions to major * 10 + minor integers
+ version=20,
+ version_string="HTTP/2",
+ # No reason phrase in HTTP/2
+ reason=None,
+ decode_content=decode_content,
+ request_url=request_url,
+ )
+ self._data = data
+ self.length_remaining = 0
+
+ @property
+ def data(self) -> bytes:
+ return self._data
+
+ def get_redirect_location(self) -> None:
+ return None
+
+ def close(self) -> None:
+ pass
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/probe.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/probe.py"
new file mode 100644
index 0000000..9ea9007
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/http2/probe.py"
@@ -0,0 +1,87 @@
+from __future__ import annotations
+
+import threading
+
+
+class _HTTP2ProbeCache:
+ __slots__ = (
+ "_lock",
+ "_cache_locks",
+ "_cache_values",
+ )
+
+ def __init__(self) -> None:
+ self._lock = threading.Lock()
+ self._cache_locks: dict[tuple[str, int], threading.RLock] = {}
+ self._cache_values: dict[tuple[str, int], bool | None] = {}
+
+ def acquire_and_get(self, host: str, port: int) -> bool | None:
+ # By the end of this block we know that
+ # _cache_[values,locks] is available.
+ value = None
+ with self._lock:
+ key = (host, port)
+ try:
+ value = self._cache_values[key]
+ # If it's a known value we return right away.
+ if value is not None:
+ return value
+ except KeyError:
+ self._cache_locks[key] = threading.RLock()
+ self._cache_values[key] = None
+
+ # If the value is unknown, we acquire the lock to signal
+            # to the requesting thread that the probe is in progress
+            # or that the current thread needs to return its findings.
+ key_lock = self._cache_locks[key]
+ key_lock.acquire()
+ try:
+            # If, by the time we get the lock, the value has been
+            # updated, we want to return the updated value.
+ value = self._cache_values[key]
+
+ # In case an exception like KeyboardInterrupt is raised here.
+ except BaseException as e: # Defensive:
+ assert not isinstance(e, KeyError) # KeyError shouldn't be possible.
+ key_lock.release()
+ raise
+
+ return value
+
+ def set_and_release(
+ self, host: str, port: int, supports_http2: bool | None
+ ) -> None:
+ key = (host, port)
+ key_lock = self._cache_locks[key]
+ with key_lock: # Uses an RLock, so can be locked again from same thread.
+ if supports_http2 is None and self._cache_values[key] is not None:
+ raise ValueError(
+ "Cannot reset HTTP/2 support for origin after value has been set."
+ ) # Defensive: not expected in normal usage
+
+ self._cache_values[key] = supports_http2
+ key_lock.release()
+
+ def _values(self) -> dict[tuple[str, int], bool | None]:
+ """This function is for testing purposes only. Gets the current state of the probe cache"""
+ with self._lock:
+ return {k: v for k, v in self._cache_values.items()}
+
+ def _reset(self) -> None:
+ """This function is for testing purposes only. Reset the cache values"""
+ with self._lock:
+ self._cache_locks = {}
+ self._cache_values = {}
+
+
+_HTTP2_PROBE_CACHE = _HTTP2ProbeCache()
+
+set_and_release = _HTTP2_PROBE_CACHE.set_and_release
+acquire_and_get = _HTTP2_PROBE_CACHE.acquire_and_get
+_values = _HTTP2_PROBE_CACHE._values
+_reset = _HTTP2_PROBE_CACHE._reset
+
+__all__ = [
+ "set_and_release",
+ "acquire_and_get",
+]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/poolmanager.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/poolmanager.py"
new file mode 100644
index 0000000..28ec82f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/poolmanager.py"
@@ -0,0 +1,651 @@
+from __future__ import annotations
+
+import functools
+import logging
+import typing
+import warnings
+from types import TracebackType
+from urllib.parse import urljoin
+
+from ._collections import HTTPHeaderDict, RecentlyUsedContainer
+from ._request_methods import RequestMethods
+from .connection import ProxyConfig
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
+from .exceptions import (
+ LocationValueError,
+ MaxRetryError,
+ ProxySchemeUnknown,
+ URLSchemeUnknown,
+)
+from .response import BaseHTTPResponse
+from .util.connection import _TYPE_SOCKET_OPTIONS
+from .util.proxy import connection_requires_http_tunnel
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import Url, parse_url
+
+if typing.TYPE_CHECKING:
+ import ssl
+
+ from typing_extensions import Self
+
+__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
+
+
+log = logging.getLogger(__name__)
+
+SSL_KEYWORDS = (
+ "key_file",
+ "cert_file",
+ "cert_reqs",
+ "ca_certs",
+ "ca_cert_data",
+ "ssl_version",
+ "ssl_minimum_version",
+ "ssl_maximum_version",
+ "ca_cert_dir",
+ "ssl_context",
+ "key_password",
+ "server_hostname",
+)
+# Default value for `blocksize` - a new parameter introduced to
+# http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7
+_DEFAULT_BLOCKSIZE = 16384
+
+
+class PoolKey(typing.NamedTuple):
+ """
+ All known keyword arguments that could be provided to the pool manager, its
+ pools, or the underlying connections.
+
+ All custom key schemes should include the fields in this key at a minimum.
+ """
+
+ key_scheme: str
+ key_host: str
+ key_port: int | None
+ key_timeout: Timeout | float | int | None
+ key_retries: Retry | bool | int | None
+ key_block: bool | None
+ key_source_address: tuple[str, int] | None
+ key_key_file: str | None
+ key_key_password: str | None
+ key_cert_file: str | None
+ key_cert_reqs: str | None
+ key_ca_certs: str | None
+ key_ca_cert_data: str | bytes | None
+ key_ssl_version: int | str | None
+ key_ssl_minimum_version: ssl.TLSVersion | None
+ key_ssl_maximum_version: ssl.TLSVersion | None
+ key_ca_cert_dir: str | None
+ key_ssl_context: ssl.SSLContext | None
+ key_maxsize: int | None
+ key_headers: frozenset[tuple[str, str]] | None
+ key__proxy: Url | None
+ key__proxy_headers: frozenset[tuple[str, str]] | None
+ key__proxy_config: ProxyConfig | None
+ key_socket_options: _TYPE_SOCKET_OPTIONS | None
+ key__socks_options: frozenset[tuple[str, str]] | None
+ key_assert_hostname: bool | str | None
+ key_assert_fingerprint: str | None
+ key_server_hostname: str | None
+ key_blocksize: int | None
+
+
+def _default_key_normalizer(
+ key_class: type[PoolKey], request_context: dict[str, typing.Any]
+) -> PoolKey:
+ """
+ Create a pool key out of a request context dictionary.
+
+ According to RFC 3986, both the scheme and host are case-insensitive.
+ Therefore, this function normalizes both before constructing the pool
+ key for an HTTPS request. If you wish to change this behaviour, provide
+ alternate callables to ``key_fn_by_scheme``.
+
+ :param key_class:
+ The class to use when constructing the key. This should be a namedtuple
+ with the ``scheme`` and ``host`` keys at a minimum.
+ :type key_class: namedtuple
+    :param request_context:
+        A dictionary-like object that contains the context for a request.
+ :type request_context: dict
+
+ :return: A namedtuple that can be used as a connection pool key.
+ :rtype: PoolKey
+ """
+ # Since we mutate the dictionary, make a copy first
+ context = request_context.copy()
+ context["scheme"] = context["scheme"].lower()
+ context["host"] = context["host"].lower()
+
+ # These are both dictionaries and need to be transformed into frozensets
+ for key in ("headers", "_proxy_headers", "_socks_options"):
+ if key in context and context[key] is not None:
+ context[key] = frozenset(context[key].items())
+
+ # The socket_options key may be a list and needs to be transformed into a
+ # tuple.
+ socket_opts = context.get("socket_options")
+ if socket_opts is not None:
+ context["socket_options"] = tuple(socket_opts)
+
+ # Map the kwargs to the names in the namedtuple - this is necessary since
+ # namedtuples can't have fields starting with '_'.
+ for key in list(context.keys()):
+ context["key_" + key] = context.pop(key)
+
+ # Default to ``None`` for keys missing from the context
+ for field in key_class._fields:
+ if field not in context:
+ context[field] = None
+
+ # Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context
+ if context.get("key_blocksize") is None:
+ context["key_blocksize"] = _DEFAULT_BLOCKSIZE
+
+ return key_class(**context)
+
+
+#: A dictionary that maps a scheme to a callable that creates a pool key.
+#: This can be used to alter the way pool keys are constructed, if desired.
+#: Each PoolManager makes a copy of this dictionary so they can be configured
+#: globally here, or individually on the instance.
+key_fn_by_scheme = {
+ "http": functools.partial(_default_key_normalizer, PoolKey),
+ "https": functools.partial(_default_key_normalizer, PoolKey),
+}
+
+pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least
+ recently used pool.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param \\**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example:
+
+ .. code-block:: python
+
+ import urllib3
+
+ http = urllib3.PoolManager(num_pools=2)
+
+ resp1 = http.request("GET", "https://google.com/")
+ resp2 = http.request("GET", "https://google.com/mail")
+ resp3 = http.request("GET", "https://yahoo.com/")
+
+ print(len(http.pools))
+ # 2
+
+ """
+
+ proxy: Url | None = None
+ proxy_config: ProxyConfig | None = None
+
+ def __init__(
+ self,
+ num_pools: int = 10,
+ headers: typing.Mapping[str, str] | None = None,
+ **connection_pool_kw: typing.Any,
+ ) -> None:
+ super().__init__(headers)
+ # PoolManager handles redirects itself in PoolManager.urlopen().
+ # It always passes redirect=False to the underlying connection pool to
+ # suppress per-pool redirect handling. If the user supplied a non-Retry
+ # value (int/bool/etc) for retries and we let the pool normalize it
+ # while redirect=False, the resulting Retry object would have redirect
+ # handling disabled, which can interfere with PoolManager's own
+ # redirect logic. Normalize here so redirects remain governed solely by
+ # PoolManager logic.
+ if "retries" in connection_pool_kw:
+ retries = connection_pool_kw["retries"]
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries)
+ connection_pool_kw = connection_pool_kw.copy()
+ connection_pool_kw["retries"] = retries
+ self.connection_pool_kw = connection_pool_kw
+
+ self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool]
+ self.pools = RecentlyUsedContainer(num_pools)
+
+ # Locally set the pool classes and keys so other PoolManagers can
+ # override them.
+ self.pool_classes_by_scheme = pool_classes_by_scheme
+ self.key_fn_by_scheme = key_fn_by_scheme.copy()
+
+ def __enter__(self) -> Self:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> typing.Literal[False]:
+ self.clear()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def _new_pool(
+ self,
+ scheme: str,
+ host: str,
+ port: int,
+ request_context: dict[str, typing.Any] | None = None,
+ ) -> HTTPConnectionPool:
+ """
+ Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
+ any additional pool keyword arguments.
+
+ If ``request_context`` is provided, it is provided as keyword arguments
+ to the pool class used. This method is used to actually create the
+ connection pools handed out by :meth:`connection_from_url` and
+ companion methods. It is intended to be overridden for customization.
+ """
+ pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme]
+ if request_context is None:
+ request_context = self.connection_pool_kw.copy()
+
+ # Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly
+ # set to 'None' in the request_context.
+ if request_context.get("blocksize") is None:
+ request_context["blocksize"] = _DEFAULT_BLOCKSIZE
+
+ # Although the context has everything necessary to create the pool,
+ # this function has historically only used the scheme, host, and port
+ # in the positional args. When an API change is acceptable these can
+ # be removed.
+ for key in ("scheme", "host", "port"):
+ request_context.pop(key, None)
+
+ if scheme == "http":
+ for kw in SSL_KEYWORDS:
+ request_context.pop(kw, None)
+
+ return pool_cls(host, port, **request_context)
+
+ def clear(self) -> None:
+ """
+ Empty our store of pools and direct them all to close.
+
+ This will not affect in-flight connections, but they will not be
+ re-used after completion.
+ """
+ self.pools.clear()
+
+ def connection_from_host(
+ self,
+ host: str | None,
+ port: int | None = None,
+ scheme: str | None = "http",
+ pool_kwargs: dict[str, typing.Any] | None = None,
+ ) -> HTTPConnectionPool:
+ """
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
+
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
+ ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
+ provided, it is merged with the instance's ``connection_pool_kw``
+ variable and used to create the new connection pool, if one is
+ needed.
+ """
+
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ request_context = self._merge_pool_kwargs(pool_kwargs)
+ request_context["scheme"] = scheme or "http"
+ if not port:
+ port = port_by_scheme.get(request_context["scheme"].lower(), 80)
+ request_context["port"] = port
+ request_context["host"] = host
+
+ return self.connection_from_context(request_context)
+
+ def connection_from_context(
+ self, request_context: dict[str, typing.Any]
+ ) -> HTTPConnectionPool:
+ """
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
+
+ ``request_context`` must at least contain the ``scheme`` key and its
+ value must be a key in ``key_fn_by_scheme`` instance variable.
+ """
+ if "strict" in request_context:
+ warnings.warn(
+ "The 'strict' parameter is no longer needed on Python 3+. "
+ "This will raise an error in urllib3 v2.1.0.",
+ DeprecationWarning,
+ )
+ request_context.pop("strict")
+
+ scheme = request_context["scheme"].lower()
+ pool_key_constructor = self.key_fn_by_scheme.get(scheme)
+ if not pool_key_constructor:
+ raise URLSchemeUnknown(scheme)
+ pool_key = pool_key_constructor(request_context)
+
+ return self.connection_from_pool_key(pool_key, request_context=request_context)
+
+ def connection_from_pool_key(
+ self, pool_key: PoolKey, request_context: dict[str, typing.Any]
+ ) -> HTTPConnectionPool:
+ """
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
+
+ ``pool_key`` should be a namedtuple that only contains immutable
+ objects. At a minimum it must have the ``scheme``, ``host``, and
+ ``port`` fields.
+ """
+ with self.pools.lock:
+ # If the scheme, host, or port doesn't match existing open
+ # connections, open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ scheme = request_context["scheme"]
+ host = request_context["host"]
+ port = request_context["port"]
+ pool = self._new_pool(scheme, host, port, request_context=request_context)
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(
+ self, url: str, pool_kwargs: dict[str, typing.Any] | None = None
+ ) -> HTTPConnectionPool:
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url`.
+
+ If ``pool_kwargs`` is not provided and a new pool needs to be
+ constructed, ``self.connection_pool_kw`` is used to initialize
+ the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
+ is provided, it is used instead. Note that if a new pool does not
+ need to be created for the request, the provided ``pool_kwargs`` are
+ not used.
+ """
+ u = parse_url(url)
+ return self.connection_from_host(
+ u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
+ )
+
+ def _merge_pool_kwargs(
+ self, override: dict[str, typing.Any] | None
+ ) -> dict[str, typing.Any]:
+ """
+ Merge a dictionary of override values for self.connection_pool_kw.
+
+ This does not modify self.connection_pool_kw and returns a new dict.
+ Any keys in the override dictionary with a value of ``None`` are
+ removed from the merged dictionary.
+ """
+ base_pool_kwargs = self.connection_pool_kw.copy()
+ if override:
+ for key, value in override.items():
+ if value is None:
+ try:
+ del base_pool_kwargs[key]
+ except KeyError:
+ pass
+ else:
+ base_pool_kwargs[key] = value
+ return base_pool_kwargs
+
+ def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool:
+ """
+ Indicates if the proxy requires the complete destination URL in the
+ request. Normally this is only needed when not using an HTTP CONNECT
+ tunnel.
+ """
+ if self.proxy is None:
+ return False
+
+ return not connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, parsed_url.scheme
+ )
+
    def urlopen(  # type: ignore[override]
        self, method: str, url: str, redirect: bool = True, **kw: typing.Any
    ) -> BaseHTTPResponse:
        """
        Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)

        if u.scheme is None:
            warnings.warn(
                "URLs without a scheme (ie 'https://') are deprecated and will raise an error "
                "in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs "
                "start with 'https://' or 'http://'. Read more in this issue: "
                "https://github.com/urllib3/urllib3/issues/2920",
                category=DeprecationWarning,
                stacklevel=2,
            )

        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        # Redirects are handled here (cross-host capable) rather than by the
        # pool, so disable the pool-level redirect and same-host checks.
        kw["assert_same_host"] = False
        kw["redirect"] = False

        if "headers" not in kw:
            kw["headers"] = self.headers

        if self._proxy_requires_url_absolute_form(u):
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        if response.status == 303:
            # Change the method according to RFC 9110, Section 15.4.4.
            method = "GET"
            # And lose the body not to transfer anything sensitive.
            kw["body"] = None
            kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()

        retries = kw.get("retries", response.retries)
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)

        # Strip headers marked as unsafe to forward to the redirected location.
        # Check remove_headers_on_redirect to avoid a potential network call within
        # conn.is_same_host() which may use socket.gethostbyname() in the future.
        if retries.remove_headers_on_redirect and not conn.is_same_host(
            redirect_location
        ):
            new_headers = kw["headers"].copy()
            for header in kw["headers"]:
                if header.lower() in retries.remove_headers_on_redirect:
                    new_headers.pop(header, None)
            kw["headers"] = new_headers

        try:
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                response.drain_conn()
                raise
            return response

        kw["retries"] = retries
        kw["redirect"] = redirect

        log.info("Redirecting %s -> %s", url, redirect_location)

        # Drain any unread body so the connection can be reused, then recurse
        # for the redirected location.
        response.drain_conn()
        return self.urlopen(method, redirect_location, **kw)
+
+
class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    :param proxy_ssl_context:
        The proxy SSL context is used to establish the TLS connection to the
        proxy when using HTTPS proxies.

    :param use_forwarding_for_https:
        (Defaults to False) If set to True will forward requests to the HTTPS
        proxy to be made on behalf of the client instead of creating a TLS
        tunnel via the CONNECT method. **Enabling this flag means that request
        and response headers and content will be visible from the HTTPS proxy**
        whereas tunneling keeps request and response headers and content
        private. IP address, target hostname, SNI, and port are always visible
        to an HTTPS proxy even when this flag is disabled.

    :param proxy_assert_hostname:
        The hostname of the certificate to verify against.

    :param proxy_assert_fingerprint:
        The fingerprint of the certificate to verify against.

    Example:

    .. code-block:: python

        import urllib3

        proxy = urllib3.ProxyManager("https://localhost:3128/")

        resp1 = proxy.request("GET", "https://google.com/")
        resp2 = proxy.request("GET", "https://httpbin.org/")

        print(len(proxy.pools))
        # 1

        resp3 = proxy.request("GET", "https://httpbin.org/")
        resp4 = proxy.request("GET", "https://twitter.com/")

        print(len(proxy.pools))
        # 3

    """

    def __init__(
        self,
        proxy_url: str,
        num_pools: int = 10,
        headers: typing.Mapping[str, str] | None = None,
        proxy_headers: typing.Mapping[str, str] | None = None,
        proxy_ssl_context: ssl.SSLContext | None = None,
        use_forwarding_for_https: bool = False,
        proxy_assert_hostname: None | str | typing.Literal[False] = None,
        proxy_assert_fingerprint: str | None = None,
        **connection_pool_kw: typing.Any,
    ) -> None:
        # Accept a ConnectionPool instance for backwards compatibility and
        # rebuild a URL string from it before parsing.
        if isinstance(proxy_url, HTTPConnectionPool):
            str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}"
        else:
            str_proxy_url = proxy_url
        proxy = parse_url(str_proxy_url)

        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)

        if not proxy.port:
            # Fall back to the scheme's well-known port when none is given.
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}
        self.proxy_ssl_context = proxy_ssl_context
        self.proxy_config = ProxyConfig(
            proxy_ssl_context,
            use_forwarding_for_https,
            proxy_assert_hostname,
            proxy_assert_fingerprint,
        )

        # Thread the proxy settings through to every pool this manager creates.
        connection_pool_kw["_proxy"] = self.proxy
        connection_pool_kw["_proxy_headers"] = self.proxy_headers
        connection_pool_kw["_proxy_config"] = self.proxy_config

        super().__init__(num_pools, headers, **connection_pool_kw)

    def connection_from_host(
        self,
        host: str | None,
        port: int | None = None,
        scheme: str | None = "http",
        pool_kwargs: dict[str, typing.Any] | None = None,
    ) -> HTTPConnectionPool:
        # HTTPS destinations get their own pool (tunneled through the proxy);
        # plain HTTP requests are pooled against the proxy itself.
        if scheme == "https":
            return super().connection_from_host(
                host, port, scheme, pool_kwargs=pool_kwargs
            )

        return super().connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs  # type: ignore[union-attr]
        )

    def _set_proxy_headers(
        self, url: str, headers: typing.Mapping[str, str] | None = None
    ) -> typing.Mapping[str, str]:
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {"Accept": "*/*"}

        netloc = parse_url(url).netloc
        if netloc:
            headers_["Host"] = netloc

        if headers:
            # User-supplied headers win over the defaults above.
            headers_.update(headers)
        return headers_

    def urlopen(  # type: ignore[override]
        self, method: str, url: str, redirect: bool = True, **kw: typing.Any
    ) -> BaseHTTPResponse:
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)
        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
            # For connections using HTTP CONNECT, httplib sets the necessary
            # headers on the CONNECT to the proxy. If we're not using CONNECT,
            # we'll definitely need to set 'Host' at the very least.
            headers = kw.get("headers", self.headers)
            kw["headers"] = self._set_proxy_headers(url, headers)

        return super().urlopen(method, url, redirect=redirect, **kw)
+
+
def proxy_from_url(url: str, **kw: typing.Any) -> ProxyManager:
    """Convenience constructor: build a :class:`ProxyManager` for ``url``."""
    manager = ProxyManager(proxy_url=url, **kw)
    return manager
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/py.typed"
new file mode 100644
index 0000000..5f3ea3d
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/py.typed"
@@ -0,0 +1,2 @@
+# Instruct type checkers to look for inline type annotations in this package.
+# See PEP 561.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/response.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/response.py"
new file mode 100644
index 0000000..f6266f1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/response.py"
@@ -0,0 +1,1476 @@
+from __future__ import annotations
+
+import collections
+import io
+import json as _json
+import logging
+import socket
+import sys
+import typing
+import warnings
+import zlib
+from contextlib import contextmanager
+from http.client import HTTPMessage as _HttplibHTTPMessage
+from http.client import HTTPResponse as _HttplibHTTPResponse
+from socket import timeout as SocketTimeout
+
+if typing.TYPE_CHECKING:
+ from ._base_connection import BaseHTTPConnection
+
+try:
+ try:
+ import brotlicffi as brotli # type: ignore[import-not-found]
+ except ImportError:
+ import brotli # type: ignore[import-not-found]
+except ImportError:
+ brotli = None
+
+from . import util
+from ._base_connection import _TYPE_BODY
+from ._collections import HTTPHeaderDict
+from .connection import BaseSSLError, HTTPConnection, HTTPException
+from .exceptions import (
+ BodyNotHttplibCompatible,
+ DecodeError,
+ DependencyWarning,
+ HTTPError,
+ IncompleteRead,
+ InvalidChunkLength,
+ InvalidHeader,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseNotChunked,
+ SSLError,
+)
+from .util.response import is_fp_closed, is_response_to_head
+from .util.retry import Retry
+
+if typing.TYPE_CHECKING:
+ from .connectionpool import HTTPConnectionPool
+
+log = logging.getLogger(__name__)
+
+
class ContentDecoder:
    """Abstract interface for streaming ``Content-Encoding`` decoders."""

    def decompress(self, data: bytes, max_length: int = -1) -> bytes:
        # Decode ``data``, returning at most ``max_length`` bytes when
        # ``max_length`` is positive (negative means unlimited).
        raise NotImplementedError()

    @property
    def has_unconsumed_tail(self) -> bool:
        # True when compressed input is buffered internally, so another
        # decompress() call may produce output without any new input.
        raise NotImplementedError()

    def flush(self) -> bytes:
        # Return any final bytes once the compressed stream is complete.
        raise NotImplementedError()
+
+
class DeflateDecoder(ContentDecoder):
    """Decoder for ``Content-Encoding: deflate``.

    Probes RFC 1950 (zlib-wrapped) data first and falls back to raw
    RFC 1951 DEFLATE on a zlib error, replaying the bytes seen so far.
    """

    def __init__(self) -> None:
        # True until the first successful decode settles the format question.
        self._first_try = True
        # Bytes fed during the probing phase, replayed on format fallback.
        self._first_try_data = b""
        # Input held back when decompress() was called with max_length == 0.
        self._unfed_data = b""
        self._obj = zlib.decompressobj()

    def decompress(self, data: bytes, max_length: int = -1) -> bytes:
        data = self._unfed_data + data
        self._unfed_data = b""
        if not data and not self._obj.unconsumed_tail:
            return data
        original_max_length = max_length
        if original_max_length < 0:
            max_length = 0
        elif original_max_length == 0:
            # We should not pass 0 to the zlib decompressor because 0 is
            # the default value that will make zlib decompress without a
            # length limit.
            # Data should be stored for subsequent calls.
            self._unfed_data = data
            return b""

        # Subsequent calls always reuse `self._obj`. zlib requires
        # passing the unconsumed tail if decompression is to continue.
        if not self._first_try:
            return self._obj.decompress(
                self._obj.unconsumed_tail + data, max_length=max_length
            )

        # First call tries with RFC 1950 ZLIB format.
        self._first_try_data += data
        try:
            decompressed = self._obj.decompress(data, max_length=max_length)
            if decompressed:
                self._first_try = False
                self._first_try_data = b""
            return decompressed
        # On failure, it falls back to RFC 1951 DEFLATE format.
        except zlib.error:
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(
                    self._first_try_data, max_length=original_max_length
                )
            finally:
                self._first_try_data = b""

    @property
    def has_unconsumed_tail(self) -> bool:
        return bool(self._unfed_data) or (
            bool(self._obj.unconsumed_tail) and not self._first_try
        )

    def flush(self) -> bytes:
        return self._obj.flush()
+
+
class GzipDecoderState:
    # Tri-state for GzipDecoder: reading the first gzip member, reading
    # subsequent members, or discarding trailing garbage after an error.
    FIRST_MEMBER = 0
    OTHER_MEMBERS = 1
    SWALLOW_DATA = 2
+
+
class GzipDecoder(ContentDecoder):
    """Decoder for ``Content-Encoding: gzip`` supporting multi-member streams."""

    def __init__(self) -> None:
        # 16 + MAX_WBITS tells zlib to expect a gzip header/trailer.
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        self._state = GzipDecoderState.FIRST_MEMBER
        # Compressed input carried over between decompress() calls.
        self._unconsumed_tail = b""

    def decompress(self, data: bytes, max_length: int = -1) -> bytes:
        ret = bytearray()
        if self._state == GzipDecoderState.SWALLOW_DATA:
            # A previous error put us in discard mode; ignore all further input.
            return bytes(ret)

        if max_length == 0:
            # We should not pass 0 to the zlib decompressor because 0 is
            # the default value that will make zlib decompress without a
            # length limit.
            # Data should be stored for subsequent calls.
            self._unconsumed_tail += data
            return b""

        # zlib requires passing the unconsumed tail to the subsequent
        # call if decompression is to continue.
        data = self._unconsumed_tail + data
        if not data and self._obj.eof:
            return bytes(ret)

        while True:
            try:
                ret += self._obj.decompress(
                    data, max_length=max(max_length - len(ret), 0)
                )
            except zlib.error:
                previous_state = self._state
                # Ignore data after the first error
                self._state = GzipDecoderState.SWALLOW_DATA
                self._unconsumed_tail = b""
                if previous_state == GzipDecoderState.OTHER_MEMBERS:
                    # Allow trailing garbage acceptable in other gzip clients
                    return bytes(ret)
                raise

            self._unconsumed_tail = data = (
                self._obj.unconsumed_tail or self._obj.unused_data
            )
            if max_length > 0 and len(ret) >= max_length:
                break

            if not data:
                return bytes(ret)
            # When the end of a gzip member is reached, a new decompressor
            # must be created for unused (possibly future) data.
            if self._obj.eof:
                self._state = GzipDecoderState.OTHER_MEMBERS
                self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

        return bytes(ret)

    @property
    def has_unconsumed_tail(self) -> bool:
        return bool(self._unconsumed_tail)

    def flush(self) -> bytes:
        return self._obj.flush()
+
+
if brotli is not None:

    class BrotliDecoder(ContentDecoder):
        # Supports both 'brotlipy' and 'Brotli' packages
        # since they share an import name. The top branches
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self) -> None:
            self._obj = brotli.Decompressor()
            # Bind whichever decompression entry point the installed
            # package exposes, shadowing the stub below.
            if hasattr(self._obj, "decompress"):
                setattr(self, "_decompress", self._obj.decompress)
            else:
                setattr(self, "_decompress", self._obj.process)

        # Requires Brotli >= 1.2.0 for `output_buffer_limit`.
        def _decompress(self, data: bytes, output_buffer_limit: int = -1) -> bytes:
            raise NotImplementedError()

        def decompress(self, data: bytes, max_length: int = -1) -> bytes:
            try:
                if max_length > 0:
                    return self._decompress(data, output_buffer_limit=max_length)
                else:
                    return self._decompress(data)
            except TypeError:
                # Fallback for Brotli/brotlicffi/brotlipy versions without
                # the `output_buffer_limit` parameter.
                warnings.warn(
                    "Brotli >= 1.2.0 is required to prevent decompression bombs.",
                    DependencyWarning,
                )
                return self._decompress(data)

        @property
        def has_unconsumed_tail(self) -> bool:
            try:
                return not self._obj.can_accept_more_data()
            except AttributeError:
                # Older brotli bindings lack can_accept_more_data().
                return False

        def flush(self) -> bytes:
            if hasattr(self._obj, "flush"):
                return self._obj.flush()  # type: ignore[no-any-return]
            return b""
+
+
try:
    # Python 3.14+ ships zstd in the stdlib; earlier versions need the backport.
    if sys.version_info >= (3, 14):
        from compression import zstd
    else:
        from backports import zstd
except ImportError:
    HAS_ZSTD = False
else:
    HAS_ZSTD = True

    class ZstdDecoder(ContentDecoder):
        """Decoder for ``Content-Encoding: zstd`` handling multi-frame input."""

        def __init__(self) -> None:
            self._obj = zstd.ZstdDecompressor()

        def decompress(self, data: bytes, max_length: int = -1) -> bytes:
            if not data and not self.has_unconsumed_tail:
                return b""
            if self._obj.eof:
                # Previous frame finished: start a new decompressor, feeding
                # it any bytes left over from the old one first.
                data = self._obj.unused_data + data
                self._obj = zstd.ZstdDecompressor()
            part = self._obj.decompress(data, max_length=max_length)
            length = len(part)
            data_parts = [part]
            # Every loop iteration is supposed to read data from a separate frame.
            # The loop breaks when:
            # - enough data is read;
            # - no more unused data is available;
            # - end of the last read frame has not been reached (i.e.,
            #   more data has to be fed).
            while (
                self._obj.eof
                and self._obj.unused_data
                and (max_length < 0 or length < max_length)
            ):
                unused_data = self._obj.unused_data
                if not self._obj.needs_input:
                    self._obj = zstd.ZstdDecompressor()
                part = self._obj.decompress(
                    unused_data,
                    max_length=(max_length - length) if max_length > 0 else -1,
                )
                if part_length := len(part):
                    data_parts.append(part)
                    length += part_length
                elif self._obj.needs_input:
                    break
            return b"".join(data_parts)

        @property
        def has_unconsumed_tail(self) -> bool:
            return not (self._obj.needs_input or self._obj.eof) or bool(
                self._obj.unused_data
            )

        def flush(self) -> bytes:
            if not self._obj.eof:
                raise DecodeError("Zstandard data is incomplete")
            return b""
+
+
class MultiDecoder(ContentDecoder):
    """
    From RFC7231:
        If one or more encodings have been applied to a representation, the
        sender that applied the encodings MUST generate a Content-Encoding
        header field that lists the content codings in the order in which
        they were applied.
    """

    # Maximum allowed number of chained HTTP encodings in the
    # Content-Encoding header.
    max_decode_links = 5

    def __init__(self, modes: str) -> None:
        encodings = [m.strip() for m in modes.split(",")]
        if len(encodings) > self.max_decode_links:
            raise DecodeError(
                "Too many content encodings in the chain: "
                f"{len(encodings)} > {self.max_decode_links}"
            )
        self._decoders = [_get_decoder(e) for e in encodings]

    def flush(self) -> bytes:
        # The first decoder in header order is the last one applied, i.e. the
        # outermost layer, so it carries the final flushable state.
        return self._decoders[0].flush()

    def decompress(self, data: bytes, max_length: int = -1) -> bytes:
        if max_length <= 0:
            # Unlimited: simply run the chain in reverse application order.
            for d in reversed(self._decoders):
                data = d.decompress(data)
            return data

        ret = bytearray()
        # Every while loop iteration goes through all decoders once.
        # It exits when enough data is read or no more data can be read.
        # It is possible that the while loop iteration does not produce
        # any data because we retrieve up to `max_length` from every
        # decoder, and the amount of bytes may be insufficient for the
        # next decoder to produce enough/any output.
        while True:
            any_data = False
            for d in reversed(self._decoders):
                data = d.decompress(data, max_length=max_length - len(ret))
                if data:
                    any_data = True
                # We should not break when no data is returned because
                # next decoders may produce data even with empty input.
            ret += data
            if not any_data or len(ret) >= max_length:
                return bytes(ret)
            data = b""

    @property
    def has_unconsumed_tail(self) -> bool:
        return any(d.has_unconsumed_tail for d in self._decoders)
+
+
def _get_decoder(mode: str) -> ContentDecoder:
    """Return a fresh :class:`ContentDecoder` for a Content-Encoding value."""
    # A comma means a chain of encodings; delegate to MultiDecoder.
    if "," in mode:
        return MultiDecoder(mode)

    # According to RFC 9110 section 8.4.1.3, recipients should
    # consider x-gzip equivalent to gzip
    if mode in ("gzip", "x-gzip"):
        return GzipDecoder()

    if brotli is not None and mode == "br":
        return BrotliDecoder()

    if HAS_ZSTD and mode == "zstd":
        return ZstdDecoder()

    # Anything else is treated as deflate (the permissive default).
    return DeflateDecoder()
+
+
class BytesQueueBuffer:
    """Memory-efficient FIFO buffer of byte chunks.

    read()-style callers need exactly-sized results, so decoded chunks are
    queued here and sliced out on demand. Partially consumed chunks are kept
    as memoryviews, so no bytes are copied until they are actually returned.

    Peak memory usage is bounded by the queued data plus the single largest
    chunk copied out by :meth:`get`.
    """

    def __init__(self) -> None:
        # Queue of pending chunks; a leftover from a split chunk is stored
        # back at the front as a zero-copy memoryview.
        self.buffer: typing.Deque[bytes | memoryview[bytes]] = collections.deque()
        self._size: int = 0

    def __len__(self) -> int:
        return self._size

    def put(self, data: bytes) -> None:
        """Append ``data`` to the tail of the queue."""
        self.buffer.append(data)
        self._size += len(data)

    def get(self, n: int) -> bytes:
        """Remove and return up to ``n`` bytes from the head of the queue.

        Raises ``RuntimeError`` if the buffer is empty (and ``n != 0``) and
        ``ValueError`` for negative ``n``.
        """
        if n == 0:
            return b""
        if not self.buffer:
            raise RuntimeError("buffer is empty")
        if n < 0:
            raise ValueError("n should be > 0")

        head = self.buffer[0]
        if isinstance(head, bytes) and len(head) == n:
            # Fast path: the first chunk is exactly the requested size.
            self._size -= n
            return self.buffer.popleft()

        out = io.BytesIO()
        copied = 0
        while copied < n:
            chunk = self.buffer.popleft()
            want = n - copied
            if len(chunk) > want:
                # Split the chunk without copying the remainder.
                view = memoryview(chunk)
                out.write(view[:want])
                self.buffer.appendleft(view[want:])
                self._size -= want
                break
            out.write(chunk)
            copied += len(chunk)
            self._size -= len(chunk)
            if not self.buffer:
                # Fewer than n bytes were available; return what we have.
                break

        return out.getvalue()

    def get_all(self) -> bytes:
        """Remove and return everything currently buffered as one bytes object."""
        chunks = self.buffer
        if not chunks:
            assert self._size == 0
            return b""
        if len(chunks) == 1:
            only = chunks.pop()
            result = only.tobytes() if isinstance(only, memoryview) else only
        else:
            sink = io.BytesIO()
            while chunks:
                sink.write(chunks.popleft())
            result = sink.getvalue()
        self._size = 0
        return result
+
+
class BaseHTTPResponse(io.IOBase):
    """Abstract base for urllib3 HTTP responses: header/status handling plus
    the content-decoding machinery shared by concrete implementations."""

    # Content-Encoding values we can decode, depending on optional deps.
    CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
    if brotli is not None:
        CONTENT_DECODERS += ["br"]
    if HAS_ZSTD:
        CONTENT_DECODERS += ["zstd"]
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    # Exceptions a decoder may raise, wrapped into DecodeError in _decode().
    DECODER_ERROR_CLASSES: tuple[type[Exception], ...] = (IOError, zlib.error)
    if brotli is not None:
        DECODER_ERROR_CLASSES += (brotli.error,)

    if HAS_ZSTD:
        DECODER_ERROR_CLASSES += (zstd.ZstdError,)

    def __init__(
        self,
        *,
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int,
        version: int,
        version_string: str,
        reason: str | None,
        decode_content: bool,
        request_url: str | None,
        retries: Retry | None = None,
    ) -> None:
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)  # type: ignore[arg-type]
        self.status = status
        self.version = version
        self.version_string = version_string
        self.reason = reason
        self.decode_content = decode_content
        self._has_decoded_content = False
        self._request_url: str | None = request_url
        self.retries = retries

        self.chunked = False
        tr_enc = self.headers.get("transfer-encoding", "").lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        self._decoder: ContentDecoder | None = None
        self.length_remaining: int | None

    def get_redirect_location(self) -> str | None | typing.Literal[False]:
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get("location")
        return False

    @property
    def data(self) -> bytes:
        raise NotImplementedError()

    def json(self) -> typing.Any:
        """
        Deserializes the body of the HTTP response as a Python object.

        The body of the HTTP response must be encoded using UTF-8, as per
        `RFC 8259 Section 8.1 <https://www.rfc-editor.org/rfc/rfc8259#section-8.1>`_.

        To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to
        your custom decoder instead.

        If the body of the HTTP response is not decodable to UTF-8, a
        `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a
        valid JSON document, a `json.JSONDecodeError` will be raised.

        Read more :ref:`here <json_content>`.

        :returns: The body of the HTTP response as a Python object.
        """
        data = self.data.decode("utf-8")
        return _json.loads(data)

    @property
    def url(self) -> str | None:
        raise NotImplementedError()

    @url.setter
    def url(self, url: str | None) -> None:
        raise NotImplementedError()

    @property
    def connection(self) -> BaseHTTPConnection | None:
        raise NotImplementedError()

    @property
    def retries(self) -> Retry | None:
        return self._retries

    @retries.setter
    def retries(self, retries: Retry | None) -> None:
        # Override the request_url if retries has a redirect location.
        if retries is not None and retries.history:
            self.url = retries.history[-1].redirect_location
        self._retries = retries

    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Iterator[bytes]:
        raise NotImplementedError()

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
        cache_content: bool = False,
    ) -> bytes:
        raise NotImplementedError()

    def read1(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> bytes:
        raise NotImplementedError()

    def read_chunked(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> typing.Iterator[bytes]:
        raise NotImplementedError()

    def release_conn(self) -> None:
        raise NotImplementedError()

    def drain_conn(self) -> None:
        raise NotImplementedError()

    def shutdown(self) -> None:
        raise NotImplementedError()

    def close(self) -> None:
        raise NotImplementedError()

    def _init_decoder(self) -> None:
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get("content-encoding", "").lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
            elif "," in content_encoding:
                # Chained encodings: only build a decoder if every token in
                # the chain is one we support.
                encodings = [
                    e.strip()
                    for e in content_encoding.split(",")
                    if e.strip() in self.CONTENT_DECODERS
                ]
                if encodings:
                    self._decoder = _get_decoder(content_encoding)

    def _decode(
        self,
        data: bytes,
        decode_content: bool | None,
        flush_decoder: bool,
        max_length: int | None = None,
    ) -> bytes:
        """
        Decode the data passed in and potentially flush the decoder.
        """
        if not decode_content:
            if self._has_decoded_content:
                raise RuntimeError(
                    "Calling read(decode_content=False) is not supported after "
                    "read(decode_content=True) was called."
                )
            return data

        if max_length is None or flush_decoder:
            max_length = -1

        try:
            if self._decoder:
                data = self._decoder.decompress(data, max_length=max_length)
                self._has_decoded_content = True
        except self.DECODER_ERROR_CLASSES as e:
            content_encoding = self.headers.get("content-encoding", "").lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding,
                e,
            ) from e
        if flush_decoder:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self) -> bytes:
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            return self._decoder.decompress(b"") + self._decoder.flush()
        return b""

    # Compatibility methods for `io` module
    def readinto(self, b: bytearray) -> int:
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[: len(temp)] = temp
            return len(temp)

    # Methods used by dependent libraries
    def getheaders(self) -> HTTPHeaderDict:
        return self.headers

    def getheader(self, name: str, default: str | None = None) -> str | None:
        return self.headers.get(name, default)

    # Compatibility method for http.cookiejar
    def info(self) -> HTTPHeaderDict:
        return self.headers

    def geturl(self) -> str | None:
        return self.url
+
+
+class HTTPResponse(BaseHTTPResponse):
+ """
+ HTTP Response container.
+
+ Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed. This
+ class is also compatible with the Python standard library's :mod:`io`
+ module, and can hence be treated as a readable object in the context of that
+ framework.
+
+ Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+
+ :param retries:
+ The retries contains the last :class:`~urllib3.util.retry.Retry` that
+ was used during the request.
+
+ :param enforce_content_length:
+ Enforce content length checking. Body returned by server must match
+ value of Content-Length header, if present. Otherwise, raise error.
+ """
+
    def __init__(
        self,
        body: _TYPE_BODY = "",
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int = 0,
        version: int = 0,
        version_string: str = "HTTP/?",
        reason: str | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        original_response: _HttplibHTTPResponse | None = None,
        pool: HTTPConnectionPool | None = None,
        connection: HTTPConnection | None = None,
        msg: _HttplibHTTPMessage | None = None,
        retries: Retry | None = None,
        enforce_content_length: bool = True,
        request_method: str | None = None,
        request_url: str | None = None,
        auto_close: bool = True,
        sock_shutdown: typing.Callable[[int], None] | None = None,
    ) -> None:
        super().__init__(
            headers=headers,
            status=status,
            version=version,
            version_string=version_string,
            reason=reason,
            decode_content=decode_content,
            request_url=request_url,
            retries=retries,
        )

        self.enforce_content_length = enforce_content_length
        self.auto_close = auto_close

        self._body = None
        self._fp: _HttplibHTTPResponse | None = None
        self._original_response = original_response
        self._fp_bytes_read = 0
        self.msg = msg

        # A str/bytes body is stored directly; a file-like body becomes _fp.
        if body and isinstance(body, (str, bytes)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, "read"):
            self._fp = body  # type: ignore[assignment]
        self._sock_shutdown = sock_shutdown

        # Are we using the chunked-style of transfer encoding?
        self.chunk_left: int | None = None

        # Determine length of response
        self.length_remaining = self._init_length(request_method)

        # Used to return the correct amount of bytes for partial read()s
        self._decoded_buffer = BytesQueueBuffer()

        # If requested, preload the body.
        # NOTE: must be the last step — read() relies on all state above.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)
+
+ def release_conn(self) -> None:
+ if not self._pool or not self._connection:
+ return None
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
    def drain_conn(self) -> None:
        """
        Read and discard any remaining HTTP response data in the response connection.

        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
        """
        try:
            self.read()
        except (HTTPError, OSError, BaseSSLError, HTTPException):
            # Best-effort drain: failures here just mean the connection
            # cannot be reused, which is handled elsewhere.
            pass
+
    @property
    def data(self) -> bytes:
        # For backwards-compat with earlier urllib3 0.4 and earlier.
        # Return a cached/preloaded body if present, otherwise read and
        # cache the full body now.
        if self._body:
            return self._body  # type: ignore[return-value]

        if self._fp:
            return self.read(cache_content=True)

        return None  # type: ignore[return-value]
+
    @property
    def connection(self) -> HTTPConnection | None:
        # The live connection, or None once released back to the pool.
        return self._connection
+
    def isclosed(self) -> bool:
        # httplib-compatibility: whether the underlying file-like body is closed.
        return is_fp_closed(self._fp)
+
    def tell(self) -> int:
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
        if bytes are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read
+
    def _init_length(self, request_method: str | None) -> int | None:
        """
        Set initial length value for Response content if available.

        Returns ``None`` when the length is unknown or the response is
        chunked; returns ``0`` for responses that must not carry a body.
        """
        length: int | None
        content_length: str | None = self.headers.get("content-length")

        if content_length is not None:
            if self.chunked:
                # This Response will fail with an IncompleteRead if it can't be
                # received as chunked. This method falls back to attempt reading
                # the response before raising an exception.
                log.warning(
                    "Received response with both Content-Length and "
                    "Transfer-Encoding set. This is expressly forbidden "
                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                    "attempting to process response as Transfer-Encoding: "
                    "chunked."
                )
                return None

            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = {int(val) for val in content_length.split(",")}
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % content_length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    # Negative lengths are invalid; treat as unknown.
                    length = None

        else:  # if content_length is None
            length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
            length = 0

        return length
+
    @contextmanager
    def _error_catcher(self) -> typing.Generator[None]:
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.

        Translations performed here:
          * socket timeouts          -> :class:`ReadTimeoutError`
          * SSL timeouts/framing     -> :class:`ReadTimeoutError` / :class:`SSLError`
          * incomplete/broken reads  -> :class:`ProtocolError`
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout as e:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if "read operation timed out" not in str(e):
                    # SSL errors related to framing/MAC get wrapped and reraised here
                    raise SSLError(e) from e

                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except IncompleteRead as e:
                if (
                    e.expected is not None
                    and e.partial is not None
                    and e.expected == -e.partial
                ):
                    arg = "Response may not contain content."
                else:
                    arg = f"Connection broken: {e!r}"
                raise ProtocolError(arg, e) from e

            except (HTTPException, OSError) as e:
                raise ProtocolError(f"Connection broken: {e!r}", e) from e

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()
+
    def _fp_read(
        self,
        amt: int | None = None,
        *,
        read1: bool = False,
    ) -> bytes:
        """
        Read a response with the thought that reading the number of bytes
        larger than can fit in a 32-bit int at a time via SSL in some
        known cases leads to an overflow error that has to be prevented
        if `amt` or `self.length_remaining` indicate that a problem may
        happen.

        The known cases:
          * CPython < 3.9.7 because of a bug
            https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
          * urllib3 injected with pyOpenSSL-backed SSL-support.
          * CPython < 3.10 only when `amt` does not fit 32-bit int.
        """
        assert self._fp
        c_int_max = 2**31 - 1
        # Only take the chunked slow path when a large read is actually
        # possible AND the runtime is one of the known-affected ones.
        if (
            (amt and amt > c_int_max)
            or (
                amt is None
                and self.length_remaining
                and self.length_remaining > c_int_max
            )
        ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)):
            if read1:
                return self._fp.read1(c_int_max)
            buffer = io.BytesIO()
            # Besides `max_chunk_amt` being a maximum chunk size, it
            # affects memory overhead of reading a response by this
            # method in CPython.
            # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
            # chunk size that does not lead to an overflow error, but
            # 256 MiB is a compromise.
            max_chunk_amt = 2**28
            while amt is None or amt != 0:
                if amt is not None:
                    chunk_amt = min(amt, max_chunk_amt)
                    amt -= chunk_amt
                else:
                    chunk_amt = max_chunk_amt
                data = self._fp.read(chunk_amt)
                if not data:
                    break
                buffer.write(data)
                del data  # to reduce peak memory usage by `max_chunk_amt`.
            return buffer.getvalue()
        elif read1:
            return self._fp.read1(amt) if amt is not None else self._fp.read1()
        else:
            # StringIO doesn't like amt=None
            return self._fp.read(amt) if amt is not None else self._fp.read()
+
    def _raw_read(
        self,
        amt: int | None = None,
        *,
        read1: bool = False,
    ) -> bytes:
        """
        Reads `amt` of bytes from the socket, without any decoding.

        Also maintains ``_fp_bytes_read`` / ``length_remaining`` bookkeeping
        and closes the low-level file object once the body is exhausted.
        """
        if self._fp is None:
            return None  # type: ignore[return-value]

        fp_closed = getattr(self._fp, "closed", False)

        with self._error_catcher():
            data = self._fp_read(amt, read1=read1) if not fp_closed else b""
            if amt is not None and amt != 0 and not data:
                # Platform-specific: Buggy versions of Python.
                # Close the connection when no data is returned
                #
                # This is redundant to what httplib/http.client _should_
                # already do. However, versions of python released before
                # December 15, 2012 (http://bugs.python.org/issue16298) do
                # not properly close the connection in all cases. There is
                # no harm in redundantly calling close.
                self._fp.close()
                if (
                    self.enforce_content_length
                    and self.length_remaining is not None
                    and self.length_remaining != 0
                ):
                    # This is an edge case that httplib failed to cover due
                    # to concerns of backward compatibility. We're
                    # addressing it here to make sure IncompleteRead is
                    # raised during streaming, so all calls with incorrect
                    # Content-Length are caught.
                    raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
            elif read1 and (
                (amt != 0 and not data) or self.length_remaining == len(data)
            ):
                # All data has been read, but `self._fp.read1` in
                # CPython 3.12 and older doesn't always close
                # `http.client.HTTPResponse`, so we close it here.
                # See https://github.com/python/cpython/issues/113199
                self._fp.close()

        if data:
            self._fp_bytes_read += len(data)
            if self.length_remaining is not None:
                self.length_remaining -= len(data)
        return data
+
    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
        cache_content: bool = False,
    ) -> bytes:
        """
        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if amt and amt < 0:
            # Negative numbers and `None` should be treated the same.
            amt = None
        elif amt is not None:
            cache_content = False

            # Serve as much as possible from already-decoded data before
            # touching the network: drain the decoder's unconsumed tail
            # first, then the decoded buffer itself.
            if self._decoder and self._decoder.has_unconsumed_tail:
                decoded_data = self._decode(
                    b"",
                    decode_content,
                    flush_decoder=False,
                    max_length=amt - len(self._decoded_buffer),
                )
                self._decoded_buffer.put(decoded_data)
            if len(self._decoded_buffer) >= amt:
                return self._decoded_buffer.get(amt)

        data = self._raw_read(amt)

        # Flush when reading to the end, or when a bounded read hit EOF.
        flush_decoder = amt is None or (amt != 0 and not data)

        if (
            not data
            and len(self._decoded_buffer) == 0
            and not (self._decoder and self._decoder.has_unconsumed_tail)
        ):
            return data

        if amt is None:
            data = self._decode(data, decode_content, flush_decoder)
            if cache_content:
                self._body = data
        else:
            # do not waste memory on buffer when not decoding
            if not decode_content:
                if self._has_decoded_content:
                    raise RuntimeError(
                        "Calling read(decode_content=False) is not supported after "
                        "read(decode_content=True) was called."
                    )
                return data

            decoded_data = self._decode(
                data,
                decode_content,
                flush_decoder,
                max_length=amt - len(self._decoded_buffer),
            )
            self._decoded_buffer.put(decoded_data)

            while len(self._decoded_buffer) < amt and data:
                # TODO make sure to initially read enough data to get past the headers
                # For example, the GZ file header takes 10 bytes, we don't want to read
                # it one byte at a time
                data = self._raw_read(amt)
                decoded_data = self._decode(
                    data,
                    decode_content,
                    flush_decoder,
                    max_length=amt - len(self._decoded_buffer),
                )
                self._decoded_buffer.put(decoded_data)
            data = self._decoded_buffer.get(amt)

        return data
+
    def read1(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> bytes:
        """
        Similar to ``http.client.HTTPResponse.read1`` and documented
        in :meth:`io.BufferedReader.read1`, but with an additional parameter:
        ``decode_content``.

        :param amt:
            How much of the content to read.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if decode_content is None:
            decode_content = self.decode_content
        if amt and amt < 0:
            # Negative numbers and `None` should be treated the same.
            amt = None
        # try and respond without going to the network
        if self._has_decoded_content:
            if not decode_content:
                raise RuntimeError(
                    "Calling read1(decode_content=False) is not supported after "
                    "read1(decode_content=True) was called."
                )
            # Decode any leftover tail held inside the decoder before
            # consulting the decoded buffer.
            if (
                self._decoder
                and self._decoder.has_unconsumed_tail
                and (amt is None or len(self._decoded_buffer) < amt)
            ):
                decoded_data = self._decode(
                    b"",
                    decode_content,
                    flush_decoder=False,
                    max_length=(
                        amt - len(self._decoded_buffer) if amt is not None else None
                    ),
                )
                self._decoded_buffer.put(decoded_data)
            if len(self._decoded_buffer) > 0:
                if amt is None:
                    return self._decoded_buffer.get_all()
                return self._decoded_buffer.get(amt)
        if amt == 0:
            return b""

        # FIXME, this method's type doesn't say returning None is possible
        data = self._raw_read(amt, read1=True)
        if not decode_content or data is None:
            return data

        self._init_decoder()
        # Keep pulling raw data until the decoder produces output (or EOF
        # forces a flush) — compressed streams may need several raw reads
        # before yielding a single decoded byte.
        while True:
            flush_decoder = not data
            decoded_data = self._decode(
                data, decode_content, flush_decoder, max_length=amt
            )
            self._decoded_buffer.put(decoded_data)
            if decoded_data or flush_decoder:
                break
            data = self._raw_read(8192, read1=True)

        if amt is None:
            return self._decoded_buffer.get_all()
        return self._decoded_buffer.get(amt)
+
+ def stream(
+ self, amt: int | None = 2**16, decode_content: bool | None = None
+ ) -> typing.Generator[bytes]:
+ """
+ A generator wrapper for the read() method. A call will block until
+ ``amt`` bytes have been read from the connection or until the
+ connection is closed.
+
+ :param amt:
+ How much of the content to read. The generator will return up to
+ much data per iteration, but may return less. This is particularly
+ likely when using compressed data. However, the empty string will
+ never be returned.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ if self.chunked and self.supports_chunked_reads():
+ yield from self.read_chunked(amt, decode_content=decode_content)
+ else:
+ while (
+ not is_fp_closed(self._fp)
+ or len(self._decoded_buffer) > 0
+ or (self._decoder and self._decoder.has_unconsumed_tail)
+ ):
+ data = self.read(amt=amt, decode_content=decode_content)
+
+ if data:
+ yield data
+
+ # Overrides from io.IOBase
+ def readable(self) -> bool:
+ return True
+
+ def shutdown(self) -> None:
+ if not self._sock_shutdown:
+ raise ValueError("Cannot shutdown socket as self._sock_shutdown is not set")
+ if self._connection is None:
+ raise RuntimeError(
+ "Cannot shutdown as connection has already been released to the pool"
+ )
+ self._sock_shutdown(socket.SHUT_RD)
+
+ def close(self) -> None:
+ self._sock_shutdown = None
+
+ if not self.closed and self._fp:
+ self._fp.close()
+
+ if self._connection:
+ self._connection.close()
+
+ if not self.auto_close:
+ io.IOBase.close(self)
+
+ @property
+ def closed(self) -> bool:
+ if not self.auto_close:
+ return io.IOBase.closed.__get__(self) # type: ignore[no-any-return]
+ elif self._fp is None:
+ return True
+ elif hasattr(self._fp, "isclosed"):
+ return self._fp.isclosed()
+ elif hasattr(self._fp, "closed"):
+ return self._fp.closed
+ else:
+ return True
+
+ def fileno(self) -> int:
+ if self._fp is None:
+ raise OSError("HTTPResponse has no file to get a fileno from")
+ elif hasattr(self._fp, "fileno"):
+ return self._fp.fileno()
+ else:
+ raise OSError(
+ "The file-like object this HTTPResponse is wrapped "
+ "around has no file descriptor"
+ )
+
+ def flush(self) -> None:
+ if (
+ self._fp is not None
+ and hasattr(self._fp, "flush")
+ and not getattr(self._fp, "closed", False)
+ ):
+ return self._fp.flush()
+
+ def supports_chunked_reads(self) -> bool:
+ """
+ Checks if the underlying file-like object looks like a
+ :class:`http.client.HTTPResponse` object. We do this by testing for
+ the fp attribute. If it is present we assume it returns raw chunks as
+ processed by read_chunked().
+ """
+ return hasattr(self._fp, "fp")
+
+ def _update_chunk_length(self) -> None:
+ # First, we'll figure out length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return None
+ line = self._fp.fp.readline() # type: ignore[union-attr]
+ line = line.split(b";", 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ self.close()
+ if line:
+ # Invalid chunked protocol response, abort.
+ raise InvalidChunkLength(self, line) from None
+ else:
+ # Truncated at start of next chunk
+ raise ProtocolError("Response ended prematurely") from None
+
+ def _handle_chunk(self, amt: int | None) -> bytes:
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr]
+ returned_chunk = chunk
+ self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif self.chunk_left is not None and amt < self.chunk_left:
+ value = self._fp._safe_read(amt) # type: ignore[union-attr]
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt) # type: ignore[union-attr]
+ self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr]
+ self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk # type: ignore[no-any-return]
+
    def read_chunked(
        self, amt: int | None = None, decode_content: bool | None = None
    ) -> typing.Generator[bytes]:
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :raises ResponseNotChunked: when the response is not actually chunked.
        :raises BodyNotHttplibCompatible: when the body lacks the ``fp``
            attribute this reader depends on.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing."
            )
        if not self.supports_chunked_reads():
            raise BodyNotHttplibCompatible(
                "Body should be http.client.HTTPResponse like. "
                "It should have have an fp attribute which returns raw chunks."
            )

        with self._error_catcher():
            # Don't bother reading the body of a HEAD request.
            if self._original_response and is_response_to_head(self._original_response):
                self._original_response.close()
                return None

            # If a response is already read and closed
            # then return immediately.
            if self._fp.fp is None:  # type: ignore[union-attr]
                return None

            if amt and amt < 0:
                # Negative numbers and `None` should be treated the same,
                # but httplib handles only `None` correctly.
                amt = None

            while True:
                # First, check if any data is left in the decoder's buffer.
                if self._decoder and self._decoder.has_unconsumed_tail:
                    chunk = b""
                else:
                    self._update_chunk_length()
                    # A zero-length chunk marks the end of the body.
                    if self.chunk_left == 0:
                        break
                    chunk = self._handle_chunk(amt)
                decoded = self._decode(
                    chunk,
                    decode_content=decode_content,
                    flush_decoder=False,
                    max_length=amt,
                )
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # lets defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while self._fp is not None:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b"\r\n":
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
+
+ @property
+ def url(self) -> str | None:
+ """
+ Returns the URL that was the source of this response.
+ If the request that generated this response redirected, this method
+ will return the final redirect location.
+ """
+ return self._request_url
+
+ @url.setter
+ def url(self, url: str | None) -> None:
+ self._request_url = url
+
+ def __iter__(self) -> typing.Iterator[bytes]:
+ buffer: list[bytes] = []
+ for chunk in self.stream(decode_content=True):
+ if b"\n" in chunk:
+ chunks = chunk.split(b"\n")
+ yield b"".join(buffer) + chunks[0] + b"\n"
+ for x in chunks[1:-1]:
+ yield x + b"\n"
+ if chunks[-1]:
+ buffer = [chunks[-1]]
+ else:
+ buffer = []
+ else:
+ buffer.append(chunk)
+ if buffer:
+ yield b"".join(buffer)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/__init__.py"
new file mode 100644
index 0000000..5341260
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/__init__.py"
@@ -0,0 +1,42 @@
+# For backwards compatibility, provide imports that used to be here.
+from __future__ import annotations
+
+from .connection import is_connection_dropped
+from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
+from .response import is_fp_closed
+from .retry import Retry
+from .ssl_ import (
+ ALPN_PROTOCOLS,
+ IS_PYOPENSSL,
+ SSLContext,
+ assert_fingerprint,
+ create_urllib3_context,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+from .timeout import Timeout
+from .url import Url, parse_url
+from .wait import wait_for_read, wait_for_write
+
# Explicit public API: these names are re-exported for backwards
# compatibility, since they were importable from ``urllib3.util`` in 1.x.
__all__ = (
    "IS_PYOPENSSL",
    "SSLContext",
    "ALPN_PROTOCOLS",
    "Retry",
    "Timeout",
    "Url",
    "assert_fingerprint",
    "create_urllib3_context",
    "is_connection_dropped",
    "is_fp_closed",
    "parse_url",
    "make_headers",
    "resolve_cert_reqs",
    "resolve_ssl_version",
    "ssl_wrap_socket",
    "wait_for_read",
    "wait_for_write",
    "SKIP_HEADER",
    "SKIPPABLE_HEADERS",
)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/connection.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/connection.py"
new file mode 100644
index 0000000..f92519e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/connection.py"
@@ -0,0 +1,137 @@
+from __future__ import annotations
+
+import socket
+import typing
+
+from ..exceptions import LocationParseError
+from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT
+
+_TYPE_SOCKET_OPTIONS = list[tuple[int, int, typing.Union[int, bytes]]]
+
+if typing.TYPE_CHECKING:
+ from .._base_connection import BaseHTTPConnection
+
+
def is_connection_dropped(conn: BaseHTTPConnection) -> bool:  # Platform-specific
    """Tell whether *conn* was dropped by the peer and should be closed.

    :param conn: :class:`urllib3.connection.HTTPConnection` object.
    """
    return not conn.is_connected
+
+
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(
    address: tuple[str, int],
    timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
    source_address: tuple[str, int] | None = None,
    socket_options: _TYPE_SOCKET_OPTIONS | None = None,
) -> socket.socket:
    """Connect to *address* and return the socket object.

    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`socket.getdefaulttimeout`
    is used. If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.

    :raises LocationParseError: if *host* fails IDNA encoding.
    :raises OSError: the last connect error, or when getaddrinfo is empty.
    """

    host, port = address
    if host.startswith("["):
        # Strip the brackets of an IPv6 literal like "[::1]".
        host = host.strip("[]")
    err = None

    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
    # The original create_connection function always returns all records.
    family = allowed_gai_family()

    try:
        host.encode("idna")
    except UnicodeError:
        raise LocationParseError(f"'{host}', label empty or too long") from None

    # Try every resolved address in order; first successful connect wins.
    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            _set_socket_options(sock, socket_options)

            if timeout is not _DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            # Break explicitly a reference cycle
            err = None
            return sock

        except OSError as _:
            err = _
            if sock is not None:
                sock.close()

    if err is not None:
        try:
            raise err
        finally:
            # Break explicitly a reference cycle
            err = None
    else:
        raise OSError("getaddrinfo returns an empty list")
+
+
def _set_socket_options(
    sock: socket.socket, options: _TYPE_SOCKET_OPTIONS | None
) -> None:
    """Apply each ``(level, optname, value)`` triple to *sock*."""
    for opt in options or ():
        sock.setsockopt(*opt)
+
+
def allowed_gai_family() -> socket.AddressFamily:
    """Address family to hand to getaddrinfo: ``AF_UNSPEC`` (both IPv4 and
    IPv6 records) when the system proved IPv6-capable at import time,
    otherwise IPv4 only."""
    return socket.AF_UNSPEC if HAS_IPV6 else socket.AF_INET
+
+
def _has_ipv6(host: str) -> bool:
    """Returns True if the system can bind an IPv6 address."""
    if not socket.has_ipv6:
        # The interpreter was compiled without IPv6 support entirely.
        return False

    # socket.has_ipv6 only tells us cPython was compiled with IPv6 support;
    # binding to an address is the reliable probe for runtime support.
    # https://github.com/urllib3/urllib3/pull/611
    # https://bugs.python.org/issue658327
    sock = None
    try:
        sock = socket.socket(socket.AF_INET6)
        sock.bind((host, 0))
        return True
    except Exception:
        return False
    finally:
        if sock:
            sock.close()


# Probed once at import time.
HAS_IPV6 = _has_ipv6("::1")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/proxy.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/proxy.py"
new file mode 100644
index 0000000..908fc66
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/proxy.py"
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+import typing
+
+from .url import Url
+
+if typing.TYPE_CHECKING:
+ from ..connection import ProxyConfig
+
+
def connection_requires_http_tunnel(
    proxy_url: Url | None = None,
    proxy_config: ProxyConfig | None = None,
    destination_scheme: str | None = None,
) -> bool:
    """
    Decide whether reaching the destination through this proxy needs an HTTP
    CONNECT tunnel rather than plain request forwarding.

    :param URL proxy_url:
        URL of the proxy.
    :param ProxyConfig proxy_config:
        Proxy configuration from poolmanager.py
    :param str destination_scheme:
        The scheme of the destination. (i.e https, http, etc)
    """
    if proxy_url is None:
        # No proxy at all: nothing to tunnel through.
        return False

    if destination_scheme == "http":
        # Plain-HTTP destinations are always forwarded, never tunneled.
        return False

    # HTTPS proxies may be explicitly configured to forward HTTPS traffic.
    forwards_https = bool(
        proxy_url.scheme == "https"
        and proxy_config
        and proxy_config.use_forwarding_for_https
    )
    # Everything else goes through a CONNECT tunnel.
    return not forwards_https
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/request.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/request.py"
new file mode 100644
index 0000000..6c2372b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/request.py"
@@ -0,0 +1,263 @@
+from __future__ import annotations
+
+import io
+import sys
+import typing
+from base64 import b64encode
+from enum import Enum
+
+from ..exceptions import UnrewindableBodyError
+from .util import to_bytes
+
+if typing.TYPE_CHECKING:
+ from typing import Final
+
+# Pass as a value within ``headers`` to skip
+# emitting some HTTP headers that are added automatically.
+# The only headers that are supported are ``Accept-Encoding``,
+# ``Host``, and ``User-Agent``.
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
+
+ACCEPT_ENCODING = "gzip,deflate"
+try:
+ try:
+ import brotlicffi as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401
+ except ImportError:
+ import brotli as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401
+except ImportError:
+ pass
+else:
+ ACCEPT_ENCODING += ",br"
+
+try:
+ if sys.version_info >= (3, 14):
+ from compression import zstd as _unused_module_zstd # noqa: F401
+ else:
+ from backports import zstd as _unused_module_zstd # noqa: F401
+except ImportError:
+ pass
+else:
+ ACCEPT_ENCODING += ",zstd"
+
+
class _TYPE_FAILEDTELL(Enum):
    # Single-member enum used as a typed sentinel value (see _FAILEDTELL).
    token = 0


# Sentinel recorded by set_file_position() when ``body.tell()`` raised, so a
# later rewind attempt can fail loudly instead of seeking somewhere bogus.
_FAILEDTELL: Final[_TYPE_FAILEDTELL] = _TYPE_FAILEDTELL.token

# A recorded body position: either a byte offset or the failed-tell sentinel.
_TYPE_BODY_POSITION = typing.Union[int, _TYPE_FAILEDTELL]
+
+# When sending a request with these methods we aren't expecting
+# a body so don't need to set an explicit 'Content-Length: 0'
+# The reason we do this in the negative instead of tracking methods
+# which 'should' have a body is because unknown methods should be
+# treated as if they were 'POST' which *does* expect a body.
+_METHODS_NOT_EXPECTING_BODY = {"GET", "HEAD", "DELETE", "TRACE", "OPTIONS", "CONNECT"}
+
+
def make_headers(
    keep_alive: bool | None = None,
    accept_encoding: bool | list[str] | str | None = None,
    user_agent: str | None = None,
    basic_auth: str | None = None,
    proxy_basic_auth: str | None = None,
    disable_cache: bool | None = None,
) -> dict[str, str]:
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate' (plus 'br' and/or 'zstd' when
        the Brotli / Zstandard dependencies are installed).
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example:

    .. code-block:: python

        import urllib3

        print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0"))
        # {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        print(urllib3.util.make_headers(accept_encoding=True))
        # {'accept-encoding': 'gzip,deflate'}
    """
    headers: dict[str, str] = {}

    if accept_encoding:
        if isinstance(accept_encoding, list):
            encoding_value = ",".join(accept_encoding)
        elif isinstance(accept_encoding, str):
            encoding_value = accept_encoding
        else:
            # Boolean True: advertise everything this runtime can decode.
            encoding_value = ACCEPT_ENCODING
        headers["accept-encoding"] = encoding_value

    if user_agent:
        headers["user-agent"] = user_agent

    if keep_alive:
        headers["connection"] = "keep-alive"

    if basic_auth:
        token = b64encode(basic_auth.encode("latin-1")).decode()
        headers["authorization"] = f"Basic {token}"

    if proxy_basic_auth:
        token = b64encode(proxy_basic_auth.encode("latin-1")).decode()
        headers["proxy-authorization"] = f"Basic {token}"

    if disable_cache:
        headers["cache-control"] = "no-cache"

    return headers
+
+
def set_file_position(
    body: typing.Any, pos: _TYPE_BODY_POSITION | None
) -> _TYPE_BODY_POSITION | None:
    """
    Seek *body* to ``pos`` when one is given; otherwise try to record the
    current position so a redirect/retry can rewind to it later.
    """
    if pos is not None:
        rewind_body(body, pos)
        return pos

    if getattr(body, "tell", None) is None:
        # Body has no notion of position; nothing to record.
        return None

    try:
        return body.tell()
    except OSError:
        # This differentiates from None, allowing us to catch
        # a failed `tell()` later when trying to rewind the body.
        return _FAILEDTELL
+
+
def rewind_body(body: typing.IO[typing.AnyStr], body_pos: _TYPE_BODY_POSITION) -> None:
    """
    Attempt to rewind body to a certain position.
    Primarily used for request redirects and retries.

    :param body:
        File-like object that supports seek.

    :param int pos:
        Position to seek to in file.

    :raises UnrewindableBodyError: seek failed, or the position was never
        recorded (the failed-tell sentinel).
    :raises ValueError: ``body_pos`` is neither an int nor the sentinel.
    """
    seek = getattr(body, "seek", None)
    if seek is not None and isinstance(body_pos, int):
        try:
            seek(body_pos)
        except OSError as e:
            raise UnrewindableBodyError(
                "An error occurred when rewinding request body for redirect/retry."
            ) from e
        return

    if body_pos is _FAILEDTELL:
        raise UnrewindableBodyError(
            "Unable to record file position for rewinding "
            "request body during a redirect/retry."
        )

    raise ValueError(
        f"body_pos must be of type integer, instead it was {type(body_pos)}."
    )
+
+
class ChunksAndContentLength(typing.NamedTuple):
    """Result of :func:`body_to_chunks`: the request body as an iterable of
    byte chunks (``None`` when there is no body), plus the Content-Length to
    advertise (``None`` means use ``Transfer-Encoding: chunked`` framing)."""

    chunks: typing.Iterable[bytes] | None
    content_length: int | None
+
+
def body_to_chunks(
    body: typing.Any | None, method: str, blocksize: int
) -> ChunksAndContentLength:
    """Takes the HTTP request method, body, and blocksize and
    transforms them into an iterable of chunks to pass to
    socket.sendall() and an optional 'Content-Length' header.

    A 'Content-Length' of 'None' indicates the length of the body
    can't be determined so should use 'Transfer-Encoding: chunked'
    for framing instead.

    :raises TypeError: when *body* is none of bytes-like, file-like,
        buffer-supporting, or iterable.
    """

    chunks: typing.Iterable[bytes] | None
    content_length: int | None

    # No body, we need to make a recommendation on 'Content-Length'
    # based on whether that request method is expected to have
    # a body or not.
    if body is None:
        chunks = None
        if method.upper() not in _METHODS_NOT_EXPECTING_BODY:
            content_length = 0
        else:
            content_length = None

    # Bytes or strings become bytes
    elif isinstance(body, (str, bytes)):
        chunks = (to_bytes(body),)
        content_length = len(chunks[0])

    # File-like object, TODO: use seek() and tell() for length?
    elif hasattr(body, "read"):

        def chunk_readable() -> typing.Iterable[bytes]:
            # Lazily stream the file in blocksize pieces; text streams are
            # encoded to UTF-8 so the socket always receives bytes.
            encode = isinstance(body, io.TextIOBase)
            while True:
                datablock = body.read(blocksize)
                if not datablock:
                    break
                if encode:
                    datablock = datablock.encode("utf-8")
                yield datablock

        chunks = chunk_readable()
        content_length = None

    # Otherwise we need to start checking via duck-typing.
    else:
        try:
            # Check if the body implements the buffer API.
            mv = memoryview(body)
        except TypeError:
            try:
                # Check if the body is an iterable
                chunks = iter(body)
                content_length = None
            except TypeError:
                raise TypeError(
                    f"'body' must be a bytes-like object, file-like "
                    f"object, or iterable. Instead was {body!r}"
                ) from None
        else:
            # Since it implements the buffer API can be passed directly to socket.sendall()
            chunks = (body,)
            content_length = mv.nbytes

    return ChunksAndContentLength(chunks=chunks, content_length=content_length)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/response.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/response.py"
new file mode 100644
index 0000000..0f45786
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/response.py"
@@ -0,0 +1,101 @@
+from __future__ import annotations
+
+import http.client as httplib
+from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
+
+from ..exceptions import HeaderParsingError
+
+
def is_fp_closed(obj: object) -> bool:
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """
    # Probe the known conventions in order of preference.  Each probe raises
    # AttributeError when the object does not support it, in which case we
    # move on to the next one.
    probes = (
        # `isclosed()` first, in case Python3 doesn't set `closed` (GH #928).
        lambda: obj.isclosed(),  # type: ignore[attr-defined]
        # The official file-like-object attribute.
        lambda: obj.closed,  # type: ignore[attr-defined]
        # Containers for another file-like object that gets released on
        # exhaustion (e.g. HTTPResponse) set their inner `fp` to None.
        lambda: obj.fp is None,  # type: ignore[attr-defined]
    )

    for probe in probes:
        try:
            return probe()  # type: ignore[no-any-return]
        except AttributeError:
            continue

    raise ValueError("Unable to determine whether fp is closed.")
+
+
def assert_header_parsing(headers: httplib.HTTPMessage) -> None:
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param http.client.HTTPMessage headers: Headers to verify.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """
    # Passing the wrong kind of parameter would otherwise fail silently, so
    # check the type explicitly to make debugging easier.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError(f"expected httplib.Message, got {type(headers)}.")

    # get_payload is actually email.message.Message.get_payload; any leftover
    # payload on a non-multipart message is header data the parser could not
    # consume.
    unparsed_data = None
    if not headers.is_multipart():
        payload = headers.get_payload()
        if isinstance(payload, (bytes, str)):
            unparsed_data = payload

    # httplib assumes a response body is available when parsing headers even
    # though it only feeds header data to parse_headers().  This produces
    # spurious defects on multipart responses in particular
    # (see https://github.com/urllib3/urllib3/issues/800), so ignore:
    # - StartBoundaryNotFoundDefect: claimed start boundary never found.
    # - MultipartInvariantViolationDefect: claimed multipart with no subparts.
    ignorable = (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
    defects = [defect for defect in headers.defects if not isinstance(defect, ignorable)]

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+
+
def is_response_to_head(response: httplib.HTTPResponse) -> bool:
    """
    Checks whether the request of a response has been a HEAD-request.

    :param http.client.HTTPResponse response:
        Response to check if the originating request
        used 'HEAD' as a method.
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    request_method: str = response._method  # type: ignore[attr-defined]
    return request_method.upper() == "HEAD"
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/retry.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/retry.py"
new file mode 100644
index 0000000..0456cce
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/retry.py"
@@ -0,0 +1,533 @@
+from __future__ import annotations
+
+import email
+import logging
+import random
+import re
+import time
+import typing
+from itertools import takewhile
+from types import TracebackType
+
+from ..exceptions import (
+ ConnectTimeoutError,
+ InvalidHeader,
+ MaxRetryError,
+ ProtocolError,
+ ProxyError,
+ ReadTimeoutError,
+ ResponseError,
+)
+from .util import reraise
+
+if typing.TYPE_CHECKING:
+ from typing_extensions import Self
+
+ from ..connectionpool import ConnectionPool
+ from ..response import BaseHTTPResponse
+
+log = logging.getLogger(__name__)
+
+
+# Data structure for representing the metadata of requests that result in a retry.
class RequestHistory(typing.NamedTuple):
    """Metadata for one attempt that resulted in a retry."""

    method: str | None
    url: str | None
    error: Exception | None  # exception raised, if the attempt failed
    status: int | None  # HTTP status code, if a response was received
    redirect_location: str | None  # Location header value, for redirect retries
+
+
+class Retry:
+ """Retry configuration.
+
+ Each retry attempt will create a new Retry object with updated values, so
+ they can be safely reused.
+
+ Retries can be defined as a default for a pool:
+
+ .. code-block:: python
+
+ retries = Retry(connect=5, read=2, redirect=5)
+ http = PoolManager(retries=retries)
+ response = http.request("GET", "https://example.com/")
+
+ Or per-request (which overrides the default for the pool):
+
+ .. code-block:: python
+
+ response = http.request("GET", "https://example.com/", retries=Retry(10))
+
+ Retries can be disabled by passing ``False``:
+
+ .. code-block:: python
+
+ response = http.request("GET", "https://example.com/", retries=False)
+
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+ retries are disabled, in which case the causing exception will be raised.
+
+ :param int total:
+ Total number of retries to allow. Takes precedence over other counts.
+
+ Set to ``None`` to remove this constraint and fall back on other
+ counts.
+
+ Set to ``0`` to fail on the first retry.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int connect:
+ How many connection-related errors to retry on.
+
+ These are errors raised before the request is sent to the remote server,
+ which we assume has not triggered the server to process the request.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int read:
+ How many times to retry on read errors.
+
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int redirect:
+ How many redirects to perform. Limit this to avoid infinite redirect
+ loops.
+
+ A redirect is a HTTP response with a status code 301, 302, 303, 307 or
+ 308.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int status:
+ How many times to retry on bad status codes.
+
+ These are retries made on responses, where status code matches
+ ``status_forcelist``.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int other:
+ How many times to retry on other errors.
+
+ Other errors are errors that are not connect, read, redirect or status errors.
+ These errors might be raised after the request was sent to the server, so the
+ request might have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ If ``total`` is not set, it's a good idea to set this to 0 to account
+ for unexpected edge cases and avoid infinite retry loops.
+
+ :param Collection allowed_methods:
+ Set of uppercased HTTP method verbs that we should retry on.
+
+ By default, we only retry on methods which are considered to be
+ idempotent (multiple requests with the same parameters end with the
+ same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
+
+ Set to a ``None`` value to retry on any verb.
+
+ :param Collection status_forcelist:
+ A set of integer HTTP status codes that we should force a retry on.
+ A retry is initiated if the request method is in ``allowed_methods``
+ and the response status code is in ``status_forcelist``.
+
+ By default, this is disabled with ``None``.
+
+ :param float backoff_factor:
+ A backoff factor to apply between attempts after the second try
+ (most errors are resolved immediately by a second try without a
+ delay). urllib3 will sleep for::
+
+ {backoff factor} * (2 ** ({number of previous retries}))
+
+ seconds. If `backoff_jitter` is non-zero, this sleep is extended by::
+
+ random.uniform(0, {backoff jitter})
+
+ seconds. For example, if the backoff_factor is 0.1, then :func:`Retry.sleep` will
+ sleep for [0.0s, 0.2s, 0.4s, 0.8s, ...] between retries. No backoff will ever
+ be longer than `backoff_max`.
+
+ By default, backoff is disabled (factor set to 0).
+
+ :param bool raise_on_redirect: Whether, if the number of redirects is
+ exhausted, to raise a MaxRetryError, or to return a response with a
+ response code in the 3xx range.
+
+ :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
+ whether we should raise an exception, or return a response,
+ if status falls in ``status_forcelist`` range and retries have
+ been exhausted.
+
+ :param tuple history: The history of the request encountered during
+ each call to :meth:`~Retry.increment`. The list is in the order
+ the requests occurred. Each list item is of class :class:`RequestHistory`.
+
+ :param bool respect_retry_after_header:
+ Whether to respect Retry-After header on status codes defined as
+ :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
+
+ :param Collection remove_headers_on_redirect:
+ Sequence of headers to remove from the request when a response
+ indicating a redirect is returned before firing off the redirected
+ request.
+ """
+
+ #: Default methods to be used for ``allowed_methods``
+ DEFAULT_ALLOWED_METHODS = frozenset(
+ ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+ )
+
+ #: Default status codes to be used for ``status_forcelist``
+ RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+
+ #: Default headers to be used for ``remove_headers_on_redirect``
+ DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(
+ ["Cookie", "Authorization", "Proxy-Authorization"]
+ )
+
+ #: Default maximum backoff time.
+ DEFAULT_BACKOFF_MAX = 120
+
+ # Backward compatibility; assigned outside of the class.
+ DEFAULT: typing.ClassVar[Retry]
+
+ def __init__(
+ self,
+ total: bool | int | None = 10,
+ connect: int | None = None,
+ read: int | None = None,
+ redirect: bool | int | None = None,
+ status: int | None = None,
+ other: int | None = None,
+ allowed_methods: typing.Collection[str] | None = DEFAULT_ALLOWED_METHODS,
+ status_forcelist: typing.Collection[int] | None = None,
+ backoff_factor: float = 0,
+ backoff_max: float = DEFAULT_BACKOFF_MAX,
+ raise_on_redirect: bool = True,
+ raise_on_status: bool = True,
+ history: tuple[RequestHistory, ...] | None = None,
+ respect_retry_after_header: bool = True,
+ remove_headers_on_redirect: typing.Collection[
+ str
+ ] = DEFAULT_REMOVE_HEADERS_ON_REDIRECT,
+ backoff_jitter: float = 0.0,
+ ) -> None:
+ self.total = total
+ self.connect = connect
+ self.read = read
+ self.status = status
+ self.other = other
+
+ if redirect is False or total is False:
+ redirect = 0
+ raise_on_redirect = False
+
+ self.redirect = redirect
+ self.status_forcelist = status_forcelist or set()
+ self.allowed_methods = allowed_methods
+ self.backoff_factor = backoff_factor
+ self.backoff_max = backoff_max
+ self.raise_on_redirect = raise_on_redirect
+ self.raise_on_status = raise_on_status
+ self.history = history or ()
+ self.respect_retry_after_header = respect_retry_after_header
+ self.remove_headers_on_redirect = frozenset(
+ h.lower() for h in remove_headers_on_redirect
+ )
+ self.backoff_jitter = backoff_jitter
+
+ def new(self, **kw: typing.Any) -> Self:
+ params = dict(
+ total=self.total,
+ connect=self.connect,
+ read=self.read,
+ redirect=self.redirect,
+ status=self.status,
+ other=self.other,
+ allowed_methods=self.allowed_methods,
+ status_forcelist=self.status_forcelist,
+ backoff_factor=self.backoff_factor,
+ backoff_max=self.backoff_max,
+ raise_on_redirect=self.raise_on_redirect,
+ raise_on_status=self.raise_on_status,
+ history=self.history,
+ remove_headers_on_redirect=self.remove_headers_on_redirect,
+ respect_retry_after_header=self.respect_retry_after_header,
+ backoff_jitter=self.backoff_jitter,
+ )
+
+ params.update(kw)
+ return type(self)(**params) # type: ignore[arg-type]
+
+ @classmethod
+ def from_int(
+ cls,
+ retries: Retry | bool | int | None,
+ redirect: bool | int | None = True,
+ default: Retry | bool | int | None = None,
+ ) -> Retry:
+ """Backwards-compatibility for the old retries format."""
+ if retries is None:
+ retries = default if default is not None else cls.DEFAULT
+
+ if isinstance(retries, Retry):
+ return retries
+
+ redirect = bool(redirect) and None
+ new_retries = cls(retries, redirect=redirect)
+ log.debug("Converted retries value: %r -> %r", retries, new_retries)
+ return new_retries
+
+ def get_backoff_time(self) -> float:
+ """Formula for computing the current backoff
+
+ :rtype: float
+ """
+ # We want to consider only the last consecutive errors sequence (Ignore redirects).
+ consecutive_errors_len = len(
+ list(
+ takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+ )
+ )
+ if consecutive_errors_len <= 1:
+ return 0
+
+ backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+ if self.backoff_jitter != 0.0:
+ backoff_value += random.random() * self.backoff_jitter
+ return float(max(0, min(self.backoff_max, backoff_value)))
+
+ def parse_retry_after(self, retry_after: str) -> float:
+ seconds: float
+ # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+ if re.match(r"^\s*[0-9]+\s*$", retry_after):
+ seconds = int(retry_after)
+ else:
+ retry_date_tuple = email.utils.parsedate_tz(retry_after)
+ if retry_date_tuple is None:
+ raise InvalidHeader(f"Invalid Retry-After header: {retry_after}")
+
+ retry_date = email.utils.mktime_tz(retry_date_tuple)
+ seconds = retry_date - time.time()
+
+ seconds = max(seconds, 0)
+
+ return seconds
+
+ def get_retry_after(self, response: BaseHTTPResponse) -> float | None:
+ """Get the value of Retry-After in seconds."""
+
+ retry_after = response.headers.get("Retry-After")
+
+ if retry_after is None:
+ return None
+
+ return self.parse_retry_after(retry_after)
+
+ def sleep_for_retry(self, response: BaseHTTPResponse) -> bool:
+ retry_after = self.get_retry_after(response)
+ if retry_after:
+ time.sleep(retry_after)
+ return True
+
+ return False
+
+ def _sleep_backoff(self) -> None:
+ backoff = self.get_backoff_time()
+ if backoff <= 0:
+ return
+ time.sleep(backoff)
+
+ def sleep(self, response: BaseHTTPResponse | None = None) -> None:
+ """Sleep between retry attempts.
+
+ This method will respect a server's ``Retry-After`` response header
+ and sleep the duration of the time requested. If that is not present, it
+ will use an exponential backoff. By default, the backoff factor is 0 and
+ this method will return immediately.
+ """
+
+ if self.respect_retry_after_header and response:
+ slept = self.sleep_for_retry(response)
+ if slept:
+ return
+
+ self._sleep_backoff()
+
+ def _is_connection_error(self, err: Exception) -> bool:
+ """Errors when we're fairly sure that the server did not receive the
+ request, so it should be safe to retry.
+ """
+ if isinstance(err, ProxyError):
+ err = err.original_error
+ return isinstance(err, ConnectTimeoutError)
+
+ def _is_read_error(self, err: Exception) -> bool:
+ """Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
+ """
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+ def _is_method_retryable(self, method: str) -> bool:
+ """Checks if a given HTTP method should be retried upon, depending if
+ it is included in the allowed_methods
+ """
+ if self.allowed_methods and method.upper() not in self.allowed_methods:
+ return False
+ return True
+
+ def is_retry(
+ self, method: str, status_code: int, has_retry_after: bool = False
+ ) -> bool:
+ """Is this method/status code retryable? (Based on allowlists and control
+ variables such as the number of total retries to allow, whether to
+ respect the Retry-After header, whether this header is present, and
+ whether the returned status code is on the list of status codes to
+ be retried upon on the presence of the aforementioned header)
+ """
+ if not self._is_method_retryable(method):
+ return False
+
+ if self.status_forcelist and status_code in self.status_forcelist:
+ return True
+
+ return bool(
+ self.total
+ and self.respect_retry_after_header
+ and has_retry_after
+ and (status_code in self.RETRY_AFTER_STATUS_CODES)
+ )
+
+ def is_exhausted(self) -> bool:
+ """Are we out of retries?"""
+ retry_counts = [
+ x
+ for x in (
+ self.total,
+ self.connect,
+ self.read,
+ self.redirect,
+ self.status,
+ self.other,
+ )
+ if x
+ ]
+ if not retry_counts:
+ return False
+
+ return min(retry_counts) < 0
+
    def increment(
        self,
        method: str | None = None,
        url: str | None = None,
        response: BaseHTTPResponse | None = None,
        error: Exception | None = None,
        _pool: ConnectionPool | None = None,
        _stacktrace: TracebackType | None = None,
    ) -> Self:
        """Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.BaseHTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.

        :raises MaxRetryError: If the incremented counters are exhausted.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise reraise(type(error), error, _stacktrace)

        # The total budget is decremented on every attempt, regardless of
        # which specific category below is also decremented.
        total = self.total
        if total is not None:
            total -= 1

        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        other = self.other
        cause = "unknown"
        status = None
        redirect_location = None

        # Classify the attempt: exactly one category counter is decremented
        # (connect / read / other / redirect / status, checked in that order).
        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1

        elif error and self._is_read_error(error):
            # Read retry?  Re-raise for non-idempotent methods since the
            # server may already have processed the request.
            if read is False or method is None or not self._is_method_retryable(method):
                raise reraise(type(error), error, _stacktrace)
            elif read is not None:
                read -= 1

        elif error:
            # Other retry?
            if other is not None:
                other -= 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            if redirect is not None:
                redirect -= 1
            cause = "too many redirects"
            response_redirect_location = response.get_redirect_location()
            if response_redirect_location:
                redirect_location = response_redirect_location
            status = response.status

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and the given method is in the allowed_methods
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                if status_count is not None:
                    status_count -= 1
                cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
            status = response.status

        # Record this attempt so get_backoff_time() can count the streak of
        # consecutive failures.
        history = self.history + (
            RequestHistory(method, url, error, status, redirect_location),
        )

        new_retry = self.new(
            total=total,
            connect=connect,
            read=read,
            redirect=redirect,
            status=status_count,
            other=other,
            history=history,
        )

        if new_retry.is_exhausted():
            reason = error or ResponseError(cause)
            raise MaxRetryError(_pool, url, reason) from reason  # type: ignore[arg-type]

        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)

        return new_retry
+
+ def __repr__(self) -> str:
+ return (
+ f"{type(self).__name__}(total={self.total}, connect={self.connect}, "
+ f"read={self.read}, redirect={self.redirect}, status={self.status})"
+ )
+
+
# For backwards compatibility (equivalent to pre-v1.9):
# Module-level default used when a request/pool supplies no Retry of its own.
Retry.DEFAULT = Retry(3)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_.py"
new file mode 100644
index 0000000..56fe909
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_.py"
@@ -0,0 +1,527 @@
+from __future__ import annotations
+
+import hashlib
+import hmac
+import os
+import socket
+import sys
+import typing
+import warnings
+from binascii import unhexlify
+
+from ..exceptions import ProxySchemeUnsupported, SSLError
+from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE
+
+SSLContext = None
+SSLTransport = None
+HAS_NEVER_CHECK_COMMON_NAME = False
+IS_PYOPENSSL = False
+ALPN_PROTOCOLS = ["http/1.1"]
+
+_TYPE_VERSION_INFO = tuple[int, int, int, str, int]
+
+# Maps the length of a digest to a possible hash function producing this digest
+HASHFUNC_MAP = {
+ length: getattr(hashlib, algorithm, None)
+ for length, algorithm in ((32, "md5"), (40, "sha1"), (64, "sha256"))
+}
+
+
+def _is_bpo_43522_fixed(
+ implementation_name: str,
+ version_info: _TYPE_VERSION_INFO,
+ pypy_version_info: _TYPE_VERSION_INFO | None,
+) -> bool:
+ """Return True for CPython 3.9.3+ or 3.10+ and PyPy 7.3.8+ where
+ setting SSLContext.hostname_checks_common_name to False works.
+
+ Outside of CPython and PyPy we don't know which implementations work
+ or not so we conservatively use our hostname matching as we know that works
+ on all implementations.
+
+ https://github.com/urllib3/urllib3/issues/2192#issuecomment-821832963
+ https://foss.heptapod.net/pypy/pypy/-/issues/3539
+ """
+ if implementation_name == "pypy":
+ # https://foss.heptapod.net/pypy/pypy/-/issues/3129
+ return pypy_version_info >= (7, 3, 8) # type: ignore[operator]
+ elif implementation_name == "cpython":
+ major_minor = version_info[:2]
+ micro = version_info[2]
+ return (major_minor == (3, 9) and micro >= 3) or major_minor >= (3, 10)
+ else: # Defensive:
+ return False
+
+
+def _is_has_never_check_common_name_reliable(
+ openssl_version: str,
+ openssl_version_number: int,
+ implementation_name: str,
+ version_info: _TYPE_VERSION_INFO,
+ pypy_version_info: _TYPE_VERSION_INFO | None,
+) -> bool:
+ # As of May 2023, all released versions of LibreSSL fail to reject certificates with
+ # only common names, see https://github.com/urllib3/urllib3/pull/3024
+ is_openssl = openssl_version.startswith("OpenSSL ")
+ # Before fixing OpenSSL issue #14579, the SSL_new() API was not copying hostflags
+ # like X509_CHECK_FLAG_NEVER_CHECK_SUBJECT, which tripped up CPython.
+ # https://github.com/openssl/openssl/issues/14579
+ # This was released in OpenSSL 1.1.1l+ (>=0x101010cf)
+ is_openssl_issue_14579_fixed = openssl_version_number >= 0x101010CF
+
+ return is_openssl and (
+ is_openssl_issue_14579_fixed
+ or _is_bpo_43522_fixed(implementation_name, version_info, pypy_version_info)
+ )
+
+
+if typing.TYPE_CHECKING:
+ from ssl import VerifyMode
+ from typing import TypedDict
+
+ from .ssltransport import SSLTransport as SSLTransportType
+
+ class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False):
+ subjectAltName: tuple[tuple[str, str], ...]
+ subject: tuple[tuple[tuple[str, str], ...], ...]
+ serialNumber: str
+
+
+# Mapping from 'ssl.PROTOCOL_TLSX' to 'TLSVersion.X'
+_SSL_VERSION_TO_TLS_VERSION: dict[int, int] = {}
+
+try: # Do we have ssl at all?
+ import ssl
+ from ssl import ( # type: ignore[assignment]
+ CERT_REQUIRED,
+ HAS_NEVER_CHECK_COMMON_NAME,
+ OP_NO_COMPRESSION,
+ OP_NO_TICKET,
+ OPENSSL_VERSION,
+ OPENSSL_VERSION_NUMBER,
+ PROTOCOL_TLS,
+ PROTOCOL_TLS_CLIENT,
+ VERIFY_X509_STRICT,
+ OP_NO_SSLv2,
+ OP_NO_SSLv3,
+ SSLContext,
+ TLSVersion,
+ )
+
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+
+ # Needed for Python 3.9 which does not define this
+ VERIFY_X509_PARTIAL_CHAIN = getattr(ssl, "VERIFY_X509_PARTIAL_CHAIN", 0x80000)
+
+ # Setting SSLContext.hostname_checks_common_name = False didn't work before CPython
+ # 3.9.3, and 3.10 (but OK on PyPy) or OpenSSL 1.1.1l+
+ if HAS_NEVER_CHECK_COMMON_NAME and not _is_has_never_check_common_name_reliable(
+ OPENSSL_VERSION,
+ OPENSSL_VERSION_NUMBER,
+ sys.implementation.name,
+ sys.version_info,
+ sys.pypy_version_info if sys.implementation.name == "pypy" else None, # type: ignore[attr-defined]
+ ): # Defensive: for Python < 3.9.3
+ HAS_NEVER_CHECK_COMMON_NAME = False
+
+ # Need to be careful here in case old TLS versions get
+ # removed in future 'ssl' module implementations.
+ for attr in ("TLSv1", "TLSv1_1", "TLSv1_2"):
+ try:
+ _SSL_VERSION_TO_TLS_VERSION[getattr(ssl, f"PROTOCOL_{attr}")] = getattr(
+ TLSVersion, attr
+ )
+ except AttributeError: # Defensive:
+ continue
+
+ from .ssltransport import SSLTransport # type: ignore[assignment]
+except ImportError:
+ OP_NO_COMPRESSION = 0x20000 # type: ignore[assignment, misc]
+ OP_NO_TICKET = 0x4000 # type: ignore[assignment, misc]
+ OP_NO_SSLv2 = 0x1000000 # type: ignore[assignment, misc]
+ OP_NO_SSLv3 = 0x2000000 # type: ignore[assignment, misc]
+ PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 # type: ignore[assignment, misc]
+ PROTOCOL_TLS_CLIENT = 16 # type: ignore[assignment, misc]
+ VERIFY_X509_PARTIAL_CHAIN = 0x80000
+ VERIFY_X509_STRICT = 0x20 # type: ignore[assignment, misc]
+
+
+_TYPE_PEER_CERT_RET = typing.Union["_TYPE_PEER_CERT_RET_DICT", bytes, None]
+
+
def assert_fingerprint(cert: bytes | None, fingerprint: str) -> None:
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.

    :raises SSLError: on any mismatch or unsupported digest length.
    """
    if cert is None:
        raise SSLError("No certificate for the peer.")

    normalized = fingerprint.replace(":", "").lower()
    digest_length = len(normalized)
    # The length determines which hash produced the fingerprint.
    if digest_length not in HASHFUNC_MAP:
        raise SSLError(f"Fingerprint of invalid length: {normalized}")
    hashfunc = HASHFUNC_MAP.get(digest_length)
    # The map may hold None when the interpreter lacks that algorithm.
    if hashfunc is None:
        raise SSLError(
            f"Hash function implementation unavailable for fingerprint length: {digest_length}"
        )

    expected = unhexlify(normalized.encode())
    cert_digest = hashfunc(cert).digest()

    # Constant-time comparison to avoid leaking match position.
    if not hmac.compare_digest(cert_digest, expected):
        raise SSLError(
            f'Fingerprints did not match. Expected "{normalized}", got "{cert_digest.hex()}"'
        )
+
+
def resolve_cert_reqs(candidate: None | int | str) -> VerifyMode:
    """
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_REQUIRED`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbreviation
    (so you can specify ``REQUIRED`` instead of ``CERT_REQUIRED``).
    If it's neither `None` nor a string we assume it is already the numeric
    constant which can directly be passed to wrap_socket.
    """
    if candidate is None:
        return CERT_REQUIRED

    if not isinstance(candidate, str):
        # Already a numeric constant.
        return candidate  # type: ignore[return-value]

    try:
        return getattr(ssl, candidate)  # type: ignore[no-any-return]
    except AttributeError:
        # Abbreviated form, e.g. "REQUIRED" -> ssl.CERT_REQUIRED.
        return getattr(ssl, "CERT_" + candidate)  # type: ignore[no-any-return]
+
+
def resolve_ssl_version(candidate: None | int | str) -> int:
    """Resolve *candidate* to an ssl protocol constant, like
    :func:`resolve_cert_reqs` does for verification modes.

    ``None`` defaults to :data:`ssl.PROTOCOL_TLS`; strings name a constant in
    the :mod:`ssl` module (optionally without the ``PROTOCOL_`` prefix);
    anything else is passed through unchanged.
    """
    if candidate is None:
        return PROTOCOL_TLS

    if not isinstance(candidate, str):
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        # Abbreviated form, e.g. "TLS_CLIENT" -> ssl.PROTOCOL_TLS_CLIENT.
        resolved = getattr(ssl, "PROTOCOL_" + candidate)
    return typing.cast(int, resolved)
+
+
def create_urllib3_context(
    ssl_version: int | None = None,
    cert_reqs: int | None = None,
    options: int | None = None,
    ciphers: str | None = None,
    ssl_minimum_version: int | None = None,
    ssl_maximum_version: int | None = None,
    verify_flags: int | None = None,
) -> ssl.SSLContext:
    """Creates and configures an :class:`ssl.SSLContext` instance for use with urllib3.

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.

        This parameter is deprecated instead use 'ssl_minimum_version'.
    :param ssl_minimum_version:
        The minimum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
    :param ssl_maximum_version:
        The maximum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
        Not recommended to set to anything other than 'ssl.TLSVersion.MAXIMUM_SUPPORTED' which is the
        default value.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
    :param ciphers:
        Which cipher suites to allow the server to select. Defaults to either system configured
        ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers.
    :param verify_flags:
        The flags for certificate verification operations. These default to
        ``ssl.VERIFY_X509_PARTIAL_CHAIN`` and ``ssl.VERIFY_X509_STRICT`` for Python 3.13+.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    :raises TypeError: if the interpreter has no usable ssl module.
    :raises ValueError: if both 'ssl_version' and a min/max TLS version are given.
    """
    if SSLContext is None:
        raise TypeError("Can't create an SSLContext object without an ssl module")

    # This means 'ssl_version' was specified as an exact value.
    if ssl_version not in (None, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT):
        # Disallow setting 'ssl_version' and 'ssl_minimum|maximum_version'
        # to avoid conflicts.
        if ssl_minimum_version is not None or ssl_maximum_version is not None:
            raise ValueError(
                "Can't specify both 'ssl_version' and either "
                "'ssl_minimum_version' or 'ssl_maximum_version'"
            )

        # 'ssl_version' is deprecated and will be removed in the future.
        else:
            # Use 'ssl_minimum_version' and 'ssl_maximum_version' instead.
            ssl_minimum_version = _SSL_VERSION_TO_TLS_VERSION.get(
                ssl_version, TLSVersion.MINIMUM_SUPPORTED
            )
            ssl_maximum_version = _SSL_VERSION_TO_TLS_VERSION.get(
                ssl_version, TLSVersion.MAXIMUM_SUPPORTED
            )

            # This warning message is pushing users to use 'ssl_minimum_version'
            # instead of both min/max. Best practice is to only set the minimum version and
            # keep the maximum version to be it's default value: 'TLSVersion.MAXIMUM_SUPPORTED'
            warnings.warn(
                "'ssl_version' option is deprecated and will be "
                "removed in urllib3 v2.6.0. Instead use 'ssl_minimum_version'",
                category=DeprecationWarning,
                stacklevel=2,
            )

    # PROTOCOL_TLS is deprecated in Python 3.10 so we always use PROTOCOL_TLS_CLIENT
    context = SSLContext(PROTOCOL_TLS_CLIENT)

    if ssl_minimum_version is not None:
        context.minimum_version = ssl_minimum_version
    else:  # Python <3.10 defaults to 'MINIMUM_SUPPORTED' so explicitly set TLSv1.2 here
        context.minimum_version = TLSVersion.TLSv1_2

    if ssl_maximum_version is not None:
        context.maximum_version = ssl_maximum_version

    # Unless we're given ciphers defer to either system ciphers in
    # the case of OpenSSL 1.1.1+ or use our own secure default ciphers.
    if ciphers:
        context.set_ciphers(ciphers)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue #309)
        options |= OP_NO_COMPRESSION
        # TLSv1.2 only. Unless set explicitly, do not request tickets.
        # This may save some bandwidth on wire, and although the ticket is encrypted,
        # there is a risk associated with it being on wire,
        # if the server is not rotating its ticketing keys properly.
        options |= OP_NO_TICKET

    context.options |= options

    if verify_flags is None:
        verify_flags = 0
        # In Python 3.13+ ssl.create_default_context() sets VERIFY_X509_PARTIAL_CHAIN
        # and VERIFY_X509_STRICT so we do the same
        if sys.version_info >= (3, 13):
            verify_flags |= VERIFY_X509_PARTIAL_CHAIN
            verify_flags |= VERIFY_X509_STRICT

    context.verify_flags |= verify_flags

    # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
    # necessary for conditional client cert authentication with TLS 1.3.
    # The attribute is None for OpenSSL <= 1.1.0 or does not exist when using
    # an SSLContext created by pyOpenSSL.
    if getattr(context, "post_handshake_auth", None) is not None:
        context.post_handshake_auth = True

    # The order of the below lines setting verify_mode and check_hostname
    # matter due to safe-guards SSLContext has to prevent an SSLContext with
    # check_hostname=True, verify_mode=NONE/OPTIONAL.
    # We always set 'check_hostname=False' for pyOpenSSL so we rely on our own
    # 'ssl.match_hostname()' implementation.
    if cert_reqs == ssl.CERT_REQUIRED and not IS_PYOPENSSL:
        context.verify_mode = cert_reqs
        context.check_hostname = True
    else:
        context.check_hostname = False
        context.verify_mode = cert_reqs

    try:
        context.hostname_checks_common_name = False
    except AttributeError:  # Defensive: for CPython < 3.9.3; for PyPy < 7.3.8
        pass

    # Honor the SSLKEYLOGFILE convention so TLS traffic can be decrypted in
    # e.g. Wireshark; the value may itself contain environment variables.
    if "SSLKEYLOGFILE" in os.environ:
        sslkeylogfile = os.path.expandvars(os.environ.get("SSLKEYLOGFILE"))
    else:
        sslkeylogfile = None
    if sslkeylogfile:
        context.keylog_filename = sslkeylogfile

    return context
+
+
# Overload: when the caller statically passes tls_in_tls=False the return
# value is always a plain ssl.SSLSocket (never an SSLTransport).
@typing.overload
def ssl_wrap_socket(
    sock: socket.socket,
    keyfile: str | None = ...,
    certfile: str | None = ...,
    cert_reqs: int | None = ...,
    ca_certs: str | None = ...,
    server_hostname: str | None = ...,
    ssl_version: int | None = ...,
    ciphers: str | None = ...,
    ssl_context: ssl.SSLContext | None = ...,
    ca_cert_dir: str | None = ...,
    key_password: str | None = ...,
    ca_cert_data: None | str | bytes = ...,
    tls_in_tls: typing.Literal[False] = ...,
) -> ssl.SSLSocket: ...
+
+
# Overload: with an arbitrary bool for tls_in_tls the result may also be an
# SSLTransport (the TLS-in-TLS wrapper returned by _ssl_wrap_socket_impl).
@typing.overload
def ssl_wrap_socket(
    sock: socket.socket,
    keyfile: str | None = ...,
    certfile: str | None = ...,
    cert_reqs: int | None = ...,
    ca_certs: str | None = ...,
    server_hostname: str | None = ...,
    ssl_version: int | None = ...,
    ciphers: str | None = ...,
    ssl_context: ssl.SSLContext | None = ...,
    ca_cert_dir: str | None = ...,
    key_password: str | None = ...,
    ca_cert_data: None | str | bytes = ...,
    tls_in_tls: bool = ...,
) -> ssl.SSLSocket | SSLTransportType: ...
+
+
def ssl_wrap_socket(
    sock: socket.socket,
    keyfile: str | None = None,
    certfile: str | None = None,
    cert_reqs: int | None = None,
    ca_certs: str | None = None,
    server_hostname: str | None = None,
    ssl_version: int | None = None,
    ciphers: str | None = None,
    ssl_context: ssl.SSLContext | None = None,
    ca_cert_dir: str | None = None,
    key_password: str | None = None,
    ca_cert_data: None | str | bytes = None,
    tls_in_tls: bool = False,
) -> ssl.SSLSocket | SSLTransportType:
    """Wrap *sock* with TLS and return the secured socket-like object.

    All arguments except ``server_hostname``, ``ssl_context``, ``tls_in_tls``,
    ``ca_cert_data`` and ``ca_cert_dir`` have the same meaning as in
    :func:`ssl.create_default_context`, :meth:`ssl.SSLContext.load_cert_chain`,
    :meth:`ssl.SSLContext.set_ciphers` and :meth:`ssl.SSLContext.wrap_socket`.

    :param server_hostname:
        Expected certificate hostname, used for SNI where supported.
    :param ssl_context:
        A pre-made :class:`SSLContext`. If omitted one is created with
        :func:`create_urllib3_context`.
    :param ciphers:
        A string of ciphers the client should offer.
    :param ca_cert_dir:
        A directory containing CA certificates in multiple separate files
        (OpenSSL's ``-CApath`` flag / the ``capath`` argument to
        ``SSLContext.load_verify_locations()``).
    :param key_password:
        Optional password if the keyfile is encrypted.
    :param ca_cert_data:
        Optional string of CA certificates in PEM format, passed as the
        ``cadata`` parameter to ``SSLContext.load_verify_locations()``.
    :param tls_in_tls:
        Use :class:`SSLTransport` to wrap the existing socket.
    """
    if ssl_context is not None:
        context = ssl_context
    else:
        # No pre-made context supplied: build one ourselves. This branch
        # (and the ssl_version/cert_reqs/ciphers arguments feeding it) is
        # only used in tests and is a candidate for deprecation/removal.
        context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)

    if ca_certs or ca_cert_dir or ca_cert_data:
        try:
            context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
        except OSError as exc:
            raise SSLError(exc) from exc
    elif ssl_context is None and hasattr(context, "load_default_certs"):
        # No explicit CA material and a context we created ourselves: fall
        # back to the OS default trust store; works well on Windows.
        context.load_default_certs()

    # Refuse to proceed when OpenSSL would otherwise prompt interactively
    # on the terminal for an encrypted private key's passphrase.
    if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
        raise SSLError("Client private key is encrypted, password is required")

    if certfile:
        if key_password is None:
            context.load_cert_chain(certfile, keyfile)
        else:
            context.load_cert_chain(certfile, keyfile, key_password)

    context.set_alpn_protocols(ALPN_PROTOCOLS)

    return _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname)
+
+
def is_ipaddress(hostname: str | bytes) -> bool:
    """Detect whether *hostname* is an IPv4 or IPv6 address.

    IPv6 addresses carrying a Zone ID are also recognised.

    :param str hostname: Hostname to examine.
    :return: True if the hostname is an IP address, False otherwise.
    """
    # IDN A-label bytes are ASCII compatible, so a plain ASCII decode is safe.
    name = hostname.decode("ascii") if isinstance(hostname, bytes) else hostname
    matched = _IPV4_RE.match(name) or _BRACELESS_IPV6_ADDRZ_RE.match(name)
    return matched is not None
+
+
+def _is_key_file_encrypted(key_file: str) -> bool:
+ """Detects if a key file is encrypted or not."""
+ with open(key_file) as f:
+ for line in f:
+ # Look for Proc-Type: 4,ENCRYPTED
+ if "ENCRYPTED" in line:
+ return True
+
+ return False
+
+
def _ssl_wrap_socket_impl(
    sock: socket.socket,
    ssl_context: ssl.SSLContext,
    tls_in_tls: bool,
    server_hostname: str | None = None,
) -> ssl.SSLSocket | SSLTransportType:
    """Dispatch: SSLTransport for TLS-in-TLS, plain wrap_socket otherwise."""
    if not tls_in_tls:
        return ssl_context.wrap_socket(sock, server_hostname=server_hostname)

    if not SSLTransport:
        # SSLTransport is falsy when the 'ssl' module failed to import.
        raise ProxySchemeUnsupported(
            "TLS in TLS requires support for the 'ssl' module"
        )

    SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
    return SSLTransport(sock, ssl_context, server_hostname)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_match_hostname.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_match_hostname.py"
new file mode 100644
index 0000000..25d9100
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssl_match_hostname.py"
@@ -0,0 +1,159 @@
+"""The match_hostname() function from Python 3.5, essential when using SSL."""
+
+# Note: This file is under the PSF license as the code comes from the python
+# stdlib. http://docs.python.org/3/license.html
+# It is modified to remove commonName support.
+
+from __future__ import annotations
+
+import ipaddress
+import re
+import typing
+from ipaddress import IPv4Address, IPv6Address
+
+if typing.TYPE_CHECKING:
+ from .ssl_ import _TYPE_PEER_CERT_RET_DICT
+
+__version__ = "3.5.0.1"
+
+
class CertificateError(ValueError):
    """Raised by match_hostname() when a certificate fails to match."""

    pass
+
+
+def _dnsname_match(
+ dn: typing.Any, hostname: str, max_wildcards: int = 1
+) -> typing.Match[str] | None | bool:
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r".")
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count("*")
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn)
+ )
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return bool(dn.lower() == hostname.lower())
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == "*":
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append("[^.]+")
+ elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
+ return pat.match(hostname)
+
+
+def _ipaddress_match(ipname: str, host_ip: IPv4Address | IPv6Address) -> bool:
+ """Exact matching of IP addresses.
+
+ RFC 9110 section 4.3.5: "A reference identity of IP-ID contains the decoded
+ bytes of the IP address. An IP version 4 address is 4 octets, and an IP
+ version 6 address is 16 octets. [...] A reference identity of type IP-ID
+ matches if the address is identical to an iPAddress value of the
+ subjectAltName extension of the certificate."
+ """
+ # OpenSSL may add a trailing newline to a subjectAltName's IP address
+ # Divergence from upstream: ipaddress can't handle byte str
+ ip = ipaddress.ip_address(ipname.rstrip())
+ return bool(ip.packed == host_ip.packed)
+
+
def match_hostname(
    cert: _TYPE_PEER_CERT_RET_DICT | None,
    hostname: str,
    hostname_checks_common_name: bool = False,
) -> None:
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError(
            "empty or no certificate, match_hostname needs a "
            "SSL socket or SSL context with either "
            "CERT_OPTIONAL or CERT_REQUIRED"
        )
    try:
        # Divergence from upstream: ipaddress can't handle byte str
        #
        # The ipaddress module shipped with Python < 3.9 does not support
        # scoped IPv6 addresses so we unconditionally strip the Zone IDs for
        # now. Once we drop support for Python 3.9 we can remove this branch.
        if "%" in hostname:
            host_ip = ipaddress.ip_address(hostname[: hostname.rfind("%")])
        else:
            host_ip = ipaddress.ip_address(hostname)

    except ValueError:
        # Not an IP address (common case).  host_ip stays None, which
        # selects DNS-name matching in the SAN loop below.
        host_ip = None
    dnsnames = []
    san: tuple[tuple[str, str], ...] = cert.get("subjectAltName", ())
    key: str
    value: str
    # Walk every subjectAltName entry; return on the first match, collect
    # non-matching names for the error message otherwise.
    for key, value in san:
        if key == "DNS":
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == "IP Address":
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)

    # We only check 'commonName' if it's enabled and we're not verifying
    # an IP address. IP addresses aren't valid within 'commonName'.
    if hostname_checks_common_name and host_ip is None and not dnsnames:
        for sub in cert.get("subject", ()):
            for key, value in sub:
                if key == "commonName":
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)  # Defensive: for Python < 3.9.3

    # Nothing matched: distinguish the many-name, one-name and no-name
    # cases so the error message names the candidates that were tried.
    if len(dnsnames) > 1:
        raise CertificateError(
            "hostname %r "
            "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
        )
    elif len(dnsnames) == 1:
        raise CertificateError(f"hostname {hostname!r} doesn't match {dnsnames[0]!r}")
    else:
        raise CertificateError("no appropriate subjectAltName fields were found")
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssltransport.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssltransport.py"
new file mode 100644
index 0000000..6d59bc3
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/ssltransport.py"
@@ -0,0 +1,271 @@
+from __future__ import annotations
+
+import io
+import socket
+import ssl
+import typing
+
+from ..exceptions import ProxySchemeUnsupported
+
+if typing.TYPE_CHECKING:
+ from typing_extensions import Self
+
+ from .ssl_ import _TYPE_PEER_CERT_RET, _TYPE_PEER_CERT_RET_DICT
+
+
# Writable buffer types accepted by recv_into()/read(buffer=...).
_WriteBuffer = typing.Union[bytearray, memoryview]
# Generic return type threaded through the _ssl_io_loop() overloads.
_ReturnValue = typing.TypeVar("_ReturnValue")

# Chunk size used when pulling raw TLS bytes off the socket in _ssl_io_loop().
SSL_BLOCKSIZE = 16384
+
+
class SSLTransport:
    """
    The SSLTransport wraps an existing socket and establishes an SSL connection.

    Contrary to Python's implementation of SSLSocket, it allows you to chain
    multiple TLS connections together. It's particularly useful if you need to
    implement TLS within TLS.

    The class supports most of the socket API operations.
    """

    @staticmethod
    def _validate_ssl_context_for_tls_in_tls(ssl_context: ssl.SSLContext) -> None:
        """
        Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
        for TLS in TLS.

        The only requirement is that the ssl_context provides the 'wrap_bio'
        methods.
        """

        if not hasattr(ssl_context, "wrap_bio"):
            raise ProxySchemeUnsupported(
                "TLS in TLS requires SSLContext.wrap_bio() which isn't "
                "available on non-native SSLContext"
            )

    def __init__(
        self,
        socket: socket.socket,
        ssl_context: ssl.SSLContext,
        server_hostname: str | None = None,
        suppress_ragged_eofs: bool = True,
    ) -> None:
        """
        Create an SSLTransport around socket using the provided ssl_context.
        """
        # Memory BIOs: 'incoming' holds TLS bytes read off the socket for the
        # SSL object to consume; 'outgoing' collects TLS bytes the SSL object
        # produced, to be flushed to the socket by _ssl_io_loop().
        self.incoming = ssl.MemoryBIO()
        self.outgoing = ssl.MemoryBIO()

        self.suppress_ragged_eofs = suppress_ragged_eofs
        self.socket = socket

        # The SSLObject drives TLS over the BIO pair instead of a real socket.
        self.sslobj = ssl_context.wrap_bio(
            self.incoming, self.outgoing, server_hostname=server_hostname
        )

        # Perform initial handshake.
        self._ssl_io_loop(self.sslobj.do_handshake)

    def __enter__(self) -> Self:
        return self

    def __exit__(self, *_: typing.Any) -> None:
        self.close()

    def fileno(self) -> int:
        return self.socket.fileno()

    def read(self, len: int = 1024, buffer: typing.Any | None = None) -> int | bytes:
        # Mirrors ssl.SSLObject.read(): returns bytes, or the byte count
        # when a writable 'buffer' is supplied.
        return self._wrap_ssl_read(len, buffer)

    def recv(self, buflen: int = 1024, flags: int = 0) -> int | bytes:
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to recv")
        return self._wrap_ssl_read(buflen)

    def recv_into(
        self,
        buffer: _WriteBuffer,
        nbytes: int | None = None,
        flags: int = 0,
    ) -> None | int | bytes:
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to recv_into")
        if nbytes is None:
            nbytes = len(buffer)
        return self.read(nbytes, buffer)

    def sendall(self, data: bytes, flags: int = 0) -> None:
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to sendall")
        count = 0
        # Loop because send() may write only part of the view at a time.
        with memoryview(data) as view, view.cast("B") as byte_view:
            amount = len(byte_view)
            while count < amount:
                v = self.send(byte_view[count:])
                count += v

    def send(self, data: bytes, flags: int = 0) -> int:
        if flags != 0:
            raise ValueError("non-zero flags not allowed in calls to send")
        return self._ssl_io_loop(self.sslobj.write, data)

    def makefile(
        self,
        mode: str,
        buffering: int | None = None,
        *,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
    ) -> typing.BinaryIO | typing.TextIO | socket.SocketIO:
        """
        Python's httpclient uses makefile and buffered io when reading HTTP
        messages and we need to support it.

        This is unfortunately a copy and paste of socket.py makefile with small
        changes to point to the socket directly.
        """
        if not set(mode) <= {"r", "w", "b"}:
            raise ValueError(f"invalid mode {mode!r} (only r, w, b allowed)")

        writing = "w" in mode
        reading = "r" in mode or not writing
        assert reading or writing
        binary = "b" in mode
        rawmode = ""
        if reading:
            rawmode += "r"
        if writing:
            rawmode += "w"
        # SocketIO treats this object as its 'socket'; our recv_into/send
        # methods provide the interface it needs.
        raw = socket.SocketIO(self, rawmode)  # type: ignore[arg-type]
        self.socket._io_refs += 1  # type: ignore[attr-defined]
        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = io.DEFAULT_BUFFER_SIZE
        if buffering == 0:
            if not binary:
                raise ValueError("unbuffered streams must be binary")
            return raw
        buffer: typing.BinaryIO
        if reading and writing:
            buffer = io.BufferedRWPair(raw, raw, buffering)  # type: ignore[assignment]
        elif reading:
            buffer = io.BufferedReader(raw, buffering)
        else:
            assert writing
            buffer = io.BufferedWriter(raw, buffering)
        if binary:
            return buffer
        text = io.TextIOWrapper(buffer, encoding, errors, newline)
        text.mode = mode  # type: ignore[misc]
        return text

    def unwrap(self) -> None:
        self._ssl_io_loop(self.sslobj.unwrap)

    def close(self) -> None:
        self.socket.close()

    @typing.overload
    def getpeercert(
        self, binary_form: typing.Literal[False] = ...
    ) -> _TYPE_PEER_CERT_RET_DICT | None: ...

    @typing.overload
    def getpeercert(self, binary_form: typing.Literal[True]) -> bytes | None: ...

    def getpeercert(self, binary_form: bool = False) -> _TYPE_PEER_CERT_RET:
        return self.sslobj.getpeercert(binary_form)  # type: ignore[return-value]

    def version(self) -> str | None:
        return self.sslobj.version()

    def cipher(self) -> tuple[str, str, int] | None:
        return self.sslobj.cipher()

    def selected_alpn_protocol(self) -> str | None:
        return self.sslobj.selected_alpn_protocol()

    def shared_ciphers(self) -> list[tuple[str, str, int]] | None:
        return self.sslobj.shared_ciphers()

    def compression(self) -> str | None:
        return self.sslobj.compression()

    def settimeout(self, value: float | None) -> None:
        # Timeouts are delegated to the underlying real socket.
        self.socket.settimeout(value)

    def gettimeout(self) -> float | None:
        return self.socket.gettimeout()

    def _decref_socketios(self) -> None:
        # Called by socket.SocketIO on close; forward to the real socket.
        self.socket._decref_socketios()  # type: ignore[attr-defined]

    def _wrap_ssl_read(self, len: int, buffer: bytearray | None = None) -> int | bytes:
        try:
            return self._ssl_io_loop(self.sslobj.read, len, buffer)
        except ssl.SSLError as e:
            # A ragged EOF (peer closed without close_notify) is reported
            # as end-of-stream when suppress_ragged_eofs is set.
            if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
                return 0  # eof, return 0.
            else:
                raise

    # func is sslobj.do_handshake or sslobj.unwrap
    @typing.overload
    def _ssl_io_loop(self, func: typing.Callable[[], None]) -> None: ...

    # func is sslobj.write, arg1 is data
    @typing.overload
    def _ssl_io_loop(self, func: typing.Callable[[bytes], int], arg1: bytes) -> int: ...

    # func is sslobj.read, arg1 is len, arg2 is buffer
    @typing.overload
    def _ssl_io_loop(
        self,
        func: typing.Callable[[int, bytearray | None], bytes],
        arg1: int,
        arg2: bytearray | None,
    ) -> bytes: ...

    def _ssl_io_loop(
        self,
        func: typing.Callable[..., _ReturnValue],
        arg1: None | bytes | int = None,
        arg2: bytearray | None = None,
    ) -> _ReturnValue:
        """Performs an I/O loop between incoming/outgoing and the socket."""
        should_loop = True
        ret = None

        while should_loop:
            errno = None
            try:
                if arg1 is None and arg2 is None:
                    ret = func()
                elif arg2 is None:
                    ret = func(arg1)
                else:
                    ret = func(arg1, arg2)
            except ssl.SSLError as e:
                if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
                    # WANT_READ, and WANT_WRITE are expected, others are not.
                    raise e
                # Remember which direction stalled so we can pump the BIOs
                # and retry the operation on the next iteration.
                errno = e.errno

            # Flush any TLS bytes the SSL object produced to the socket.
            buf = self.outgoing.read()
            self.socket.sendall(buf)

            if errno is None:
                should_loop = False
            elif errno == ssl.SSL_ERROR_WANT_READ:
                # The SSL object needs more TLS input: read from the socket
                # into the incoming BIO; an empty read signals EOF.
                buf = self.socket.recv(SSL_BLOCKSIZE)
                if buf:
                    self.incoming.write(buf)
                else:
                    self.incoming.write_eof()
        return typing.cast(_ReturnValue, ret)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/timeout.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/timeout.py"
new file mode 100644
index 0000000..4bb1be1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/timeout.py"
@@ -0,0 +1,275 @@
+from __future__ import annotations
+
+import time
+import typing
+from enum import Enum
+from socket import getdefaulttimeout
+
+from ..exceptions import TimeoutStateError
+
+if typing.TYPE_CHECKING:
+ from typing import Final
+
+
class _TYPE_DEFAULT(Enum):
    # Sentinel meaning "use the system default timeout".
    # This value should never be passed to socket.settimeout() so for safety we use a -1.
    # socket.settimeout() raises a ValueError for negative values.
    token = -1


# The single sentinel instance; compared with 'is' throughout Timeout.
_DEFAULT_TIMEOUT: Final[_TYPE_DEFAULT] = _TYPE_DEFAULT.token

# A timeout value: seconds as a float, None (no timeout), or the sentinel.
_TYPE_TIMEOUT = typing.Optional[typing.Union[float, _TYPE_DEFAULT]]
+
+
class Timeout:
    """Timeout configuration.

    Timeouts can be defined as a default for a pool:

    .. code-block:: python

        import urllib3

        timeout = urllib3.util.Timeout(connect=2.0, read=7.0)

        http = urllib3.PoolManager(timeout=timeout)

        resp = http.request("GET", "https://example.com/")

        print(resp.status)

    Or per-request (which overrides the default for the pool):

    .. code-block:: python

        response = http.request("GET", "https://example.com/", timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``:

    .. code-block:: python

        no_timeout = Timeout(connect=None, read=None)
        response = http.request("GET", "https://example.com/", timeout=no_timeout)


    :param total:
        This combines the connect and read timeouts into one; the read timeout
        will be set to the time leftover from the connect attempt. In the
        event that both a connect timeout and a total are specified, or a read
        timeout and a total are specified, the shorter timeout will be applied.

        Defaults to None.

    :type total: int, float, or None

    :param connect:
        The maximum amount of time (in seconds) to wait for a connection
        attempt to a server to succeed. Omitting the parameter will default the
        connect timeout to the system default, probably `the global default
        timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout for connection attempts.

    :type connect: int, float, or None

    :param read:
        The maximum amount of time (in seconds) to wait between consecutive
        read operations for a response from the server. Omitting the parameter
        will default the read timeout to the system default, probably `the
        global default timeout in socket.py
        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
        None will set an infinite timeout.

    :type read: int, float, or None

    .. note::

        Many factors can affect the total amount of time for urllib3 to return
        an HTTP response.

        For example, Python's DNS resolver does not obey the timeout specified
        on the socket. Other factors that can affect total request time include
        high CPU load, high swap, the program running at a low priority level,
        or other behaviors.

        In addition, the read and total timeouts only measure the time between
        read operations on the socket connecting the client and the server,
        not the total amount of time for the request to return a complete
        response. For most requests, the timeout is raised because the server
        has not sent the first byte in the specified time. This is not always
        the case; if a server streams one byte every fifteen seconds, a timeout
        of 20 seconds will not trigger, even though the request will take
        several minutes to complete.
    """

    #: A sentinel object representing the default timeout value
    DEFAULT_TIMEOUT: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT

    def __init__(
        self,
        total: _TYPE_TIMEOUT = None,
        connect: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        read: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
    ) -> None:
        # Validate eagerly so a misconfigured timeout fails at construction
        # time rather than when the socket is first used.
        self._connect = self._validate_timeout(connect, "connect")
        self._read = self._validate_timeout(read, "read")
        self.total = self._validate_timeout(total, "total")
        # Monotonic start time of the connect attempt; None until
        # start_connect() is called.
        self._start_connect: float | None = None

    def __repr__(self) -> str:
        return f"{type(self).__name__}(connect={self._connect!r}, read={self._read!r}, total={self.total!r})"

    # __str__ provided for backwards compatibility
    __str__ = __repr__

    @staticmethod
    def resolve_default_timeout(timeout: _TYPE_TIMEOUT) -> float | None:
        # Replace the sentinel with socket.getdefaulttimeout(); pass
        # explicit floats and None through unchanged.
        return getdefaulttimeout() if timeout is _DEFAULT_TIMEOUT else timeout

    @classmethod
    def _validate_timeout(cls, value: _TYPE_TIMEOUT, name: str) -> _TYPE_TIMEOUT:
        """Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If it is a numeric value less than or equal to
            zero, or the type is not an integer, float, or None.
        """
        if value is None or value is _DEFAULT_TIMEOUT:
            return value

        # bool is a subclass of int, so it must be rejected before the
        # float() conversion below would silently accept it.
        if isinstance(value, bool):
            raise ValueError(
                "Timeout cannot be a boolean value. It must "
                "be an int, float or None."
            )
        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError(
                "Timeout value %s was %s, but it must be an "
                "int, float or None." % (name, value)
            ) from None

        try:
            if value <= 0:
                raise ValueError(
                    "Attempted to set %s timeout to %s, but the "
                    "timeout cannot be set to a value less "
                    "than or equal to 0." % (name, value)
                )
        except TypeError:
            raise ValueError(
                "Timeout value %s was %s, but it must be an "
                "int, float or None." % (name, value)
            ) from None

        return value

    @classmethod
    def from_float(cls, timeout: _TYPE_TIMEOUT) -> Timeout:
        """Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
        object that sets the individual timeouts to the ``timeout`` value
        passed to this function.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, :attr:`urllib3.util.Timeout.DEFAULT_TIMEOUT`, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)

    def clone(self) -> Timeout:
        """Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        """
        # We can't use copy.deepcopy because that will also create a new object
        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
        # detect the user default.
        return Timeout(connect=self._connect, read=self._read, total=self.total)

    def start_connect(self) -> float:
        """Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = time.monotonic()
        return self._start_connect

    def get_connect_duration(self) -> float:
        """Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time in seconds.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        """
        if self._start_connect is None:
            raise TimeoutStateError(
                "Can't get connect duration for timer that has not started."
            )
        return time.monotonic() - self._start_connect

    @property
    def connect_timeout(self) -> _TYPE_TIMEOUT:
        """Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        """
        if self.total is None:
            return self._connect

        if self._connect is None or self._connect is _DEFAULT_TIMEOUT:
            return self.total

        # Both connect and total are set: the shorter one wins.
        return min(self._connect, self.total)  # type: ignore[type-var]

    @property
    def read_timeout(self) -> float | None:
        """Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        """
        if (
            self.total is not None
            and self.total is not _DEFAULT_TIMEOUT
            and self._read is not None
            and self._read is not _DEFAULT_TIMEOUT
        ):
            # In case the connect timeout has not yet been established.
            if self._start_connect is None:
                return self._read
            # Read gets whatever is left of 'total', capped by the explicit
            # read timeout, and never negative.
            return max(0, min(self.total - self.get_connect_duration(), self._read))
        elif self.total is not None and self.total is not _DEFAULT_TIMEOUT:
            return max(0, self.total - self.get_connect_duration())
        else:
            return self.resolve_default_timeout(self._read)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/url.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/url.py"
new file mode 100644
index 0000000..db057f1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/url.py"
@@ -0,0 +1,469 @@
+from __future__ import annotations
+
+import re
+import typing
+
+from ..exceptions import LocationParseError
+from .util import to_str
+
+# We only want to normalize urls with an HTTP(S) scheme.
+# urllib3 infers URLs without a scheme (None) to be http.
+_NORMALIZABLE_SCHEMES = ("http", "https", None)
+
+# Almost all of these patterns were derived from the
+# 'rfc3986' module: https://github.com/python-hyper/rfc3986
+# Matches a single, already percent-encoded byte, e.g. "%2F".
+_PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
+# Matches a URL that starts with either a scheme or a path separator,
+# i.e. one that does not need the "//" prefix added before parsing.
+_SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
+# Splits a URI into (scheme, authority, path, query, fragment) groups
+# following the generic grammar of RFC 3986 appendix B.
+_URI_RE = re.compile(
+    r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
+    r"(?://([^\\/?#]*))?"
+    r"([^?#]*)"
+    r"(?:\?([^#]*))?"
+    r"(?:#(.*))?$",
+    re.UNICODE | re.DOTALL,
+)
+
+# IPv4 dotted-quad (no range validation of each octet; that is left to
+# higher layers).
+_IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
+_HEX_PAT = "[0-9A-Fa-f]{1,4}"
+_LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=_HEX_PAT, ipv4=_IPV4_PAT)
+_subs = {"hex": _HEX_PAT, "ls32": _LS32_PAT}
+# Each entry below is one alternative of the RFC 3986 IPv6address rule.
+_variations = [
+    # 6( h16 ":" ) ls32
+    "(?:%(hex)s:){6}%(ls32)s",
+    # "::" 5( h16 ":" ) ls32
+    "::(?:%(hex)s:){5}%(ls32)s",
+    # [ h16 ] "::" 4( h16 ":" ) ls32
+    "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
+    # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+    "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
+    # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+    "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
+    # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+    "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
+    # [ *4( h16 ":" ) h16 ] "::" ls32
+    "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
+    # [ *5( h16 ":" ) h16 ] "::" h16
+    "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
+    # [ *6( h16 ":" ) h16 ] "::"
+    "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
+]
+
+_UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~"
+_IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
+# Zone ID per RFC 6874: "%25" (encoded "%") or a bare "%", then unreserved
+# or percent-encoded characters.
+_ZONE_ID_PAT = "(?:%25|%)(?:[" + _UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
+# Bracketed IPv6 literal with an optional zone id, e.g. "[fe80::1%25eth0]".
+_IPV6_ADDRZ_PAT = r"\[" + _IPV6_PAT + r"(?:" + _ZONE_ID_PAT + r")?\]"
+_REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
+# Splits a request target into (path, query), discarding any fragment.
+_TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
+
+_IPV4_RE = re.compile("^" + _IPV4_PAT + "$")
+_IPV6_RE = re.compile("^" + _IPV6_PAT + "$")
+_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT + "$")
+# Same as _IPV6_ADDRZ_RE but without the surrounding "[" and "]".
+_BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT[2:-2] + "$")
+_ZONE_ID_RE = re.compile("(" + _ZONE_ID_PAT + r")\]$")
+
+# Host plus optional port; leading zeros in the port are stripped by the
+# non-greedy "0*?" so "0080" captures as "80".
+_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % (
+    _REG_NAME_PAT,
+    _IPV4_PAT,
+    _IPV6_ADDRZ_PAT,
+)
+_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
+
+# Character classes used by _encode_invalid_chars for each URL component.
+_UNRESERVED_CHARS = set(
+    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
+)
+_SUB_DELIM_CHARS = set("!$&'()*+,;=")
+_USERINFO_CHARS = _UNRESERVED_CHARS | _SUB_DELIM_CHARS | {":"}
+_PATH_CHARS = _USERINFO_CHARS | {"@", "/"}
+_QUERY_CHARS = _FRAGMENT_CHARS = _PATH_CHARS | {"?"}
+
+
+class Url(
+    typing.NamedTuple(
+        "Url",
+        [
+            ("scheme", typing.Optional[str]),
+            ("auth", typing.Optional[str]),
+            ("host", typing.Optional[str]),
+            ("port", typing.Optional[int]),
+            ("path", typing.Optional[str]),
+            ("query", typing.Optional[str]),
+            ("fragment", typing.Optional[str]),
+        ],
+    )
+):
+    """
+    Data structure for representing an HTTP URL. Used as a return value for
+    :func:`parse_url`. Both the scheme and host are normalized as they are
+    both case-insensitive according to RFC 3986.
+    """
+
+    def __new__(  # type: ignore[no-untyped-def]
+        cls,
+        scheme: str | None = None,
+        auth: str | None = None,
+        host: str | None = None,
+        port: int | None = None,
+        path: str | None = None,
+        query: str | None = None,
+        fragment: str | None = None,
+    ):
+        # Normalize at construction time: a relative path gets a leading "/"
+        # and the scheme is lowered (schemes are case-insensitive, RFC 3986).
+        if path and not path.startswith("/"):
+            path = "/" + path
+        if scheme is not None:
+            scheme = scheme.lower()
+        return super().__new__(cls, scheme, auth, host, port, path, query, fragment)
+
+    @property
+    def hostname(self) -> str | None:
+        """For backwards-compatibility with urlparse. We're nice like that."""
+        return self.host
+
+    @property
+    def request_uri(self) -> str:
+        """Absolute path including the query string."""
+        uri = self.path or "/"
+
+        if self.query is not None:
+            uri += "?" + self.query
+
+        return uri
+
+    @property
+    def authority(self) -> str | None:
+        """
+        Authority component as defined in RFC 3986 3.2.
+        This includes userinfo (auth), host and port.
+
+        i.e.
+            userinfo@host:port
+        """
+        userinfo = self.auth
+        netloc = self.netloc
+        if netloc is None or userinfo is None:
+            return netloc
+        else:
+            return f"{userinfo}@{netloc}"
+
+    @property
+    def netloc(self) -> str | None:
+        """
+        Network location including host and port.
+
+        If you need the equivalent of urllib.parse's ``netloc``,
+        use the ``authority`` property instead.
+        """
+        if self.host is None:
+            return None
+        # Truthiness check: a port of 0 (or None) is omitted from the result.
+        if self.port:
+            return f"{self.host}:{self.port}"
+        return self.host
+
+    @property
+    def url(self) -> str:
+        """
+        Convert self into a url
+
+        This function should more or less round-trip with :func:`.parse_url`. The
+        returned url may not be exactly the same as the url inputted to
+        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+        with a blank port will have : removed).
+
+        Example:
+
+        .. code-block:: python
+
+            import urllib3
+
+            U = urllib3.util.parse_url("https://google.com/mail/")
+
+            print(U.url)
+            # "https://google.com/mail/"
+
+            print( urllib3.util.Url("https", "username:password",
+                                    "host.com", 80, "/path", "query", "fragment"
+                                    ).url
+                )
+            # "https://username:password@host.com:80/path?query#fragment"
+        """
+        scheme, auth, host, port, path, query, fragment = self
+        url = ""
+
+        # We use "is not None" we want things to happen with empty strings (or 0 port)
+        if scheme is not None:
+            url += scheme + "://"
+        if auth is not None:
+            url += auth + "@"
+        if host is not None:
+            url += host
+        if port is not None:
+            url += ":" + str(port)
+        if path is not None:
+            url += path
+        if query is not None:
+            url += "?" + query
+        if fragment is not None:
+            url += "#" + fragment
+
+        return url
+
+    def __str__(self) -> str:
+        return self.url
+
+
+@typing.overload
+def _encode_invalid_chars(
+    component: str, allowed_chars: typing.Container[str]
+) -> str:  # Abstract
+    ...
+
+
+@typing.overload
+def _encode_invalid_chars(
+    component: None, allowed_chars: typing.Container[str]
+) -> None:  # Abstract
+    ...
+
+
+def _encode_invalid_chars(
+    component: str | None, allowed_chars: typing.Container[str]
+) -> str | None:
+    """Percent-encodes a URI component without reapplying
+    onto an already percent-encoded component.
+
+    :param component: component text to encode, or ``None`` (passed through).
+    :param allowed_chars: characters that are left unencoded.
+    :return: the encoded component, or ``None`` if ``component`` was ``None``.
+    """
+    if component is None:
+        return component
+
+    component = to_str(component)
+
+    # Normalize existing percent-encoded bytes.
+    # Try to see if the component we're encoding is already percent-encoded
+    # so we can skip all '%' characters but still encode all others.
+    component, percent_encodings = _PERCENT_RE.subn(
+        lambda match: match.group(0).upper(), component
+    )
+
+    uri_bytes = component.encode("utf-8", "surrogatepass")
+    # If every "%" byte belongs to an existing %XX escape, the component is
+    # treated as already percent-encoded and "%" is left untouched below.
+    is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
+    encoded_component = bytearray()
+
+    for i in range(0, len(uri_bytes)):
+        # Will return a single character bytestring
+        byte = uri_bytes[i : i + 1]
+        byte_ord = ord(byte)
+        if (is_percent_encoded and byte == b"%") or (
+            byte_ord < 128 and byte.decode() in allowed_chars
+        ):
+            encoded_component += byte
+            continue
+        # Everything else becomes an uppercase, zero-padded %XX escape.
+        encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
+
+    return encoded_component.decode()
+
+
+def _remove_path_dot_segments(path: str) -> str:
+    """Resolve "." and ".." segments in *path* per RFC 3986 section 5.2.4.
+
+    :param path: a URL path component, possibly containing dot segments.
+    :return: the path with "." removed and ".." collapsed into its parent.
+    """
+    # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+    segments = path.split("/")  # Turn the path into a list of segments
+    output = []  # Initialize the variable to use to store output
+
+    for segment in segments:
+        # '.' is the current directory, so ignore it, it is superfluous
+        if segment == ".":
+            continue
+        # Anything other than '..', should be appended to the output
+        if segment != "..":
+            output.append(segment)
+        # In this case segment == '..', if we can, we should pop the last
+        # element
+        elif output:
+            output.pop()
+
+    # If the path starts with '/' and the output is empty or the first string
+    # is non-empty
+    if path.startswith("/") and (not output or output[0]):
+        output.insert(0, "")
+
+    # If the path starts with '/.' or '/..' ensure we add one more empty
+    # string to add a trailing '/'
+    if path.endswith(("/.", "/..")):
+        output.append("")
+
+    return "/".join(output)
+
+
+@typing.overload
+def _normalize_host(host: None, scheme: str | None) -> None: ...
+
+
+@typing.overload
+def _normalize_host(host: str, scheme: str | None) -> str: ...
+
+
+def _normalize_host(host: str | None, scheme: str | None) -> str | None:
+    """Normalize a host for HTTP(S) schemes.
+
+    Lowercases the host, unquotes IPv6 zone identifiers (RFC 6874), and
+    IDNA-encodes non-IPv4 registered names. Hosts for non-normalizable
+    schemes are returned unchanged.
+    """
+    if host:
+        if scheme in _NORMALIZABLE_SCHEMES:
+            is_ipv6 = _IPV6_ADDRZ_RE.match(host)
+            if is_ipv6:
+                # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as
+                # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID
+                # separator as necessary to return a valid RFC 4007 scoped IP.
+                match = _ZONE_ID_RE.search(host)
+                if match:
+                    start, end = match.span(1)
+                    zone_id = host[start:end]
+
+                    # Strip the "%25" (encoded "%") or bare "%" prefix from
+                    # the zone id before re-encoding its remaining characters.
+                    if zone_id.startswith("%25") and zone_id != "%25":
+                        zone_id = zone_id[3:]
+                    else:
+                        zone_id = zone_id[1:]
+                    zone_id = _encode_invalid_chars(zone_id, _UNRESERVED_CHARS)
+                    return f"{host[:start].lower()}%{zone_id}{host[end:]}"
+                else:
+                    return host.lower()
+            elif not _IPV4_RE.match(host):
+                # Registered name: IDNA-encode each dot-separated label.
+                return to_str(
+                    b".".join([_idna_encode(label) for label in host.split(".")]),
+                    "ascii",
+                )
+    return host
+
+
+def _idna_encode(name: str) -> bytes:
+    """Encode a single host label to ASCII bytes, using IDNA when needed.
+
+    :param name: one dot-separated host label.
+    :return: the lowercased label as ASCII bytes.
+    :raises LocationParseError: if the label is non-ASCII and either the
+        optional 'idna' module is unavailable or the label is invalid IDNA.
+    """
+    if not name.isascii():
+        # 'idna' is an optional third-party dependency; import lazily so
+        # pure-ASCII hosts never require it.
+        try:
+            import idna
+        except ImportError:
+            raise LocationParseError(
+                "Unable to parse URL without the 'idna' module"
+            ) from None
+
+        try:
+            return idna.encode(name.lower(), strict=True, std3_rules=True)
+        except idna.IDNAError:
+            raise LocationParseError(
+                f"Name '{name}' is not a valid IDNA label"
+            ) from None
+
+    return name.lower().encode("ascii")
+
+
+def _encode_target(target: str) -> str:
+    """Percent-encodes a request target so that there are no invalid characters
+
+    Pre-condition for this function is that 'target' must start with '/'.
+    If that is the case then _TARGET_RE will always produce a match.
+
+    :param target: origin-form request target (path plus optional query).
+    :return: the target with path and query separately percent-encoded.
+    :raises LocationParseError: if the target does not match _TARGET_RE.
+    """
+    match = _TARGET_RE.match(target)
+    if not match:  # Defensive:
+        raise LocationParseError(f"{target!r} is not a valid request URI")
+
+    # Path and query use different allowed-character sets; any fragment was
+    # already discarded by _TARGET_RE.
+    path, query = match.groups()
+    encoded_target = _encode_invalid_chars(path, _PATH_CHARS)
+    if query is not None:
+        query = _encode_invalid_chars(query, _QUERY_CHARS)
+        encoded_target += "?" + query
+    return encoded_target
+
+
+def parse_url(url: str) -> Url:
+    """
+    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+    performed to parse incomplete urls. Fields not provided will be None.
+    This parser is RFC 3986 and RFC 6874 compliant.
+
+    The parser logic and helper functions are based heavily on
+    work done in the ``rfc3986`` module.
+
+    :param str url: URL to parse into a :class:`.Url` namedtuple.
+
+    Partly backwards-compatible with :mod:`urllib.parse`.
+
+    Example:
+
+    .. code-block:: python
+
+        import urllib3
+
+        print( urllib3.util.parse_url('http://google.com/mail/'))
+        # Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+
+        print( urllib3.util.parse_url('google.com:80'))
+        # Url(scheme=None, host='google.com', port=80, path=None, ...)
+
+        print( urllib3.util.parse_url('/foo?bar'))
+        # Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+    """
+    if not url:
+        # Empty
+        return Url()
+
+    # Keep the caller's original string for error messages; "url" itself may
+    # be rewritten below.
+    source_url = url
+    if not _SCHEME_RE.search(url):
+        # Prepend "//" so hosts without a scheme parse as an authority
+        # instead of a path (mirrors urllib.parse behavior for "host:port").
+        url = "//" + url
+
+    scheme: str | None
+    authority: str | None
+    auth: str | None
+    host: str | None
+    port: str | None
+    port_int: int | None
+    path: str | None
+    query: str | None
+    fragment: str | None
+
+    try:
+        scheme, authority, path, query, fragment = _URI_RE.match(url).groups()  # type: ignore[union-attr]
+        # Only HTTP(S) (or scheme-less) URLs get component normalization.
+        normalize_uri = scheme is None or scheme.lower() in _NORMALIZABLE_SCHEMES
+
+        if scheme:
+            scheme = scheme.lower()
+
+        if authority:
+            # rpartition so that an "@" inside userinfo splits at the last one.
+            auth, _, host_port = authority.rpartition("@")
+            auth = auth or None
+            host, port = _HOST_PORT_RE.match(host_port).groups()  # type: ignore[union-attr]
+            if auth and normalize_uri:
+                auth = _encode_invalid_chars(auth, _USERINFO_CHARS)
+            if port == "":
+                port = None
+        else:
+            auth, host, port = None, None, None
+
+        if port is not None:
+            # int() may raise ValueError; caught below and re-raised as
+            # LocationParseError.
+            port_int = int(port)
+            if not (0 <= port_int <= 65535):
+                raise LocationParseError(url)
+        else:
+            port_int = None
+
+        host = _normalize_host(host, scheme)
+
+        if normalize_uri and path:
+            path = _remove_path_dot_segments(path)
+            path = _encode_invalid_chars(path, _PATH_CHARS)
+        if normalize_uri and query:
+            query = _encode_invalid_chars(query, _QUERY_CHARS)
+        if normalize_uri and fragment:
+            fragment = _encode_invalid_chars(fragment, _FRAGMENT_CHARS)
+
+    except (ValueError, AttributeError) as e:
+        raise LocationParseError(source_url) from e
+
+    # For the sake of backwards compatibility we put empty
+    # string values for path if there are any defined values
+    # beyond the path in the URL.
+    # TODO: Remove this when we break backwards compatibility.
+    if not path:
+        if query is not None or fragment is not None:
+            path = ""
+        else:
+            path = None
+
+    return Url(
+        scheme=scheme,
+        auth=auth,
+        host=host,
+        port=port_int,
+        path=path,
+        query=query,
+        fragment=fragment,
+    )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/util.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/util.py"
new file mode 100644
index 0000000..35c77e4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/util.py"
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+import typing
+from types import TracebackType
+
+
+def to_bytes(
+    x: str | bytes, encoding: str | None = None, errors: str | None = None
+) -> bytes:
+    """Coerce *x* to ``bytes``, encoding ``str`` input.
+
+    :param x: value to coerce; ``bytes`` are returned unchanged.
+    :param encoding: codec for encoding ``str`` input (defaults to UTF-8).
+    :param errors: error handler for encoding (defaults to "strict").
+    :raises TypeError: if *x* is neither ``str`` nor ``bytes``.
+    """
+    if isinstance(x, bytes):
+        return x
+    elif not isinstance(x, str):
+        raise TypeError(f"not expecting type {type(x).__name__}")
+    if encoding or errors:
+        return x.encode(encoding or "utf-8", errors=errors or "strict")
+    # No overrides given: use the bare encode() fast path.
+    return x.encode()
+
+
+def to_str(
+    x: str | bytes, encoding: str | None = None, errors: str | None = None
+) -> str:
+    """Coerce *x* to ``str``, decoding ``bytes`` input.
+
+    :param x: value to coerce; ``str`` is returned unchanged.
+    :param encoding: codec for decoding ``bytes`` input (defaults to UTF-8).
+    :param errors: error handler for decoding (defaults to "strict").
+    :raises TypeError: if *x* is neither ``str`` nor ``bytes``.
+    """
+    if isinstance(x, str):
+        return x
+    elif not isinstance(x, bytes):
+        raise TypeError(f"not expecting type {type(x).__name__}")
+    if encoding or errors:
+        return x.decode(encoding or "utf-8", errors=errors or "strict")
+    # No overrides given: use the bare decode() fast path.
+    return x.decode()
+
+
+def reraise(
+    tp: type[BaseException] | None,
+    value: BaseException,
+    tb: TracebackType | None = None,
+) -> typing.NoReturn:
+    """Re-raise *value*, attaching traceback *tb* if it differs.
+
+    :param tp: exception type (unused; kept for signature compatibility).
+    :param value: the exception instance to raise.
+    :param tb: traceback to attach via ``with_traceback``.
+    """
+    try:
+        if value.__traceback__ is not tb:
+            raise value.with_traceback(tb)
+        raise value
+    finally:
+        # Drop local references so the exception/traceback cycle does not
+        # keep frames alive.
+        value = None  # type: ignore[assignment]
+        tb = None
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/wait.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/wait.py"
new file mode 100644
index 0000000..aeca0c7
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/urllib3/util/wait.py"
@@ -0,0 +1,124 @@
+from __future__ import annotations
+
+import select
+import socket
+from functools import partial
+
+__all__ = ["wait_for_read", "wait_for_write"]
+
+
+# How should we wait on sockets?
+#
+# There are two types of APIs you can use for waiting on sockets: the fancy
+# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
+# select/poll. The stateful APIs are more efficient when you have a lots of
+# sockets to keep track of, because you can set them up once and then use them
+# lots of times. But we only ever want to wait on a single socket at a time
+# and don't want to keep track of state, so the stateless APIs are actually
+# more efficient. So we want to use select() or poll().
+#
+# Now, how do we choose between select() and poll()? On traditional Unixes,
+# select() has a strange calling convention that makes it slow, or fail
+# altogether, for high-numbered file descriptors. The point of poll() is to fix
+# that, so on Unixes, we prefer poll().
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+
+def select_wait_for_socket(
+    sock: socket.socket,
+    read: bool = False,
+    write: bool = False,
+    timeout: float | None = None,
+) -> bool:
+    """Wait on *sock* using select(); return True if it became ready.
+
+    :param sock: the socket to wait on.
+    :param read: wait for readability.
+    :param write: wait for writability.
+    :param timeout: seconds to wait, or None to block indefinitely.
+    :raises RuntimeError: if neither *read* nor *write* is requested.
+    """
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    rcheck = []
+    wcheck = []
+    if read:
+        rcheck.append(sock)
+    if write:
+        wcheck.append(sock)
+    # When doing a non-blocking connect, most systems signal success by
+    # marking the socket writable. Windows, though, signals success by marked
+    # it as "exceptional". We paper over the difference by checking the write
+    # sockets for both conditions. (The stdlib selectors module does the same
+    # thing.)
+    fn = partial(select.select, rcheck, wcheck, wcheck)
+    rready, wready, xready = fn(timeout)
+    return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(
+    sock: socket.socket,
+    read: bool = False,
+    write: bool = False,
+    timeout: float | None = None,
+) -> bool:
+    """Wait on *sock* using poll(); return True if it became ready.
+
+    :param sock: the socket to wait on.
+    :param read: wait for readability (POLLIN).
+    :param write: wait for writability (POLLOUT).
+    :param timeout: seconds to wait, or None to block indefinitely.
+    :raises RuntimeError: if neither *read* nor *write* is requested.
+    """
+    if not read and not write:
+        raise RuntimeError("must specify at least one of read=True, write=True")
+    mask = 0
+    if read:
+        mask |= select.POLLIN
+    if write:
+        mask |= select.POLLOUT
+    poll_obj = select.poll()
+    poll_obj.register(sock, mask)
+
+    # For some reason, poll() takes timeout in milliseconds
+    def do_poll(t: float | None) -> list[tuple[int, int]]:
+        if t is not None:
+            t *= 1000
+        return poll_obj.poll(t)
+
+    return bool(do_poll(timeout))
+
+
+def _have_working_poll() -> bool:
+    """Return True if select.poll() exists and actually works on this system."""
+    # Apparently some systems have a select.poll that fails as soon as you try
+    # to use it, either due to strange configuration or broken monkeypatching
+    # from libraries like eventlet/greenlet.
+    try:
+        poll_obj = select.poll()
+        poll_obj.poll(0)
+    except (AttributeError, OSError):
+        # AttributeError: poll missing entirely; OSError: present but broken.
+        return False
+    else:
+        return True
+
+
+def wait_for_socket(
+    sock: socket.socket,
+    read: bool = False,
+    write: bool = False,
+    timeout: float | None = None,
+) -> bool:
+    """Wait on *sock*, choosing the poll- or select-based backend lazily.
+
+    On first call this rebinds the module-level ``wait_for_socket`` name to
+    the chosen backend, so subsequent calls skip the probe.
+    """
+    # We delay choosing which implementation to use until the first time we're
+    # called. We could do it at import time, but then we might make the wrong
+    # decision if someone goes wild with monkeypatching select.poll after
+    # we're imported.
+    global wait_for_socket
+    if _have_working_poll():
+        wait_for_socket = poll_wait_for_socket
+    elif hasattr(select, "select"):
+        wait_for_socket = select_wait_for_socket
+    return wait_for_socket(sock, read, write, timeout)
+
+
+def wait_for_read(sock: socket.socket, timeout: float | None = None) -> bool:
+    """Waits for reading to be available on a given socket.
+    Returns True if the socket is readable, or False if the timeout expired.
+    """
+    return wait_for_socket(sock, read=True, timeout=timeout)
+
+
+def wait_for_write(sock: socket.socket, timeout: float | None = None) -> bool:
+    """Waits for writing to be available on a given socket.
+    Returns True if the socket is writable, or False if the timeout expired.
+    """
+    return wait_for_socket(sock, write=True, timeout=timeout)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/INSTALLER" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/INSTALLER"
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/INSTALLER"
@@ -0,0 +1 @@
+pip
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/METADATA" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/METADATA"
new file mode 100644
index 0000000..9556b21
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/METADATA"
@@ -0,0 +1,2478 @@
+Metadata-Version: 2.4
+Name: yarl
+Version: 1.22.0
+Summary: Yet another URL library
+Home-page: https://github.com/aio-libs/yarl
+Author: Andrew Svetlov
+Author-email: andrew.svetlov@gmail.com
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache-2.0
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: GitHub Workflows, https://github.com/aio-libs/yarl/actions?query=branch:master
+Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/yarl
+Project-URL: Docs: Changelog, https://yarl.aio-libs.org/en/latest/changes/
+Project-URL: Docs: RTD, https://yarl.aio-libs.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/yarl/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/yarl
+Keywords: cython,cext,yarl
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Cython
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: idna>=2.0
+Requires-Dist: multidict>=4.0
+Requires-Dist: propcache>=0.2.1
+Dynamic: license-file
+
+yarl
+====
+
+The module provides handy URL class for URL parsing and changing.
+
+.. image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI
+ :align: right
+
+.. image:: https://codecov.io/gh/aio-libs/yarl/graph/badge.svg?flag=pytest
+ :target: https://app.codecov.io/gh/aio-libs/yarl?flags[]=pytest
+ :alt: Codecov coverage for the pytest-driven measurements
+
+.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json
+ :target: https://codspeed.io/aio-libs/yarl
+
+.. image:: https://badge.fury.io/py/yarl.svg
+ :target: https://badge.fury.io/py/yarl
+
+.. image:: https://readthedocs.org/projects/yarl/badge/?version=latest
+ :target: https://yarl.aio-libs.org
+
+.. image:: https://img.shields.io/pypi/pyversions/yarl.svg
+ :target: https://pypi.python.org/pypi/yarl
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
+
+
+Introduction
+------------
+
+Url is constructed from ``str``:
+
+.. code-block:: pycon
+
+ >>> from yarl import URL
+ >>> url = URL('https://www.python.org/~guido?arg=1#frag')
+ >>> url
+ URL('https://www.python.org/~guido?arg=1#frag')
+
+All url parts: *scheme*, *user*, *password*, *host*, *port*, *path*,
+*query* and *fragment* are accessible by properties:
+
+.. code-block:: pycon
+
+ >>> url.scheme
+ 'https'
+ >>> url.host
+ 'www.python.org'
+ >>> url.path
+ '/~guido'
+ >>> url.query_string
+ 'arg=1'
+ >>> url.query
+ <MultiDictProxy('arg': '1')>
+ >>> url.fragment
+ 'frag'
+
+All url manipulations produce a new url object:
+
+.. code-block:: pycon
+
+ >>> url = URL('https://www.python.org')
+ >>> url / 'foo' / 'bar'
+ URL('https://www.python.org/foo/bar')
+ >>> url / 'foo' % {'bar': 'baz'}
+ URL('https://www.python.org/foo?bar=baz')
+
+Strings passed to constructor and modification methods are
+automatically encoded giving canonical representation as result:
+
+.. code-block:: pycon
+
+ >>> url = URL('https://www.python.org/шлях')
+ >>> url
+ URL('https://www.python.org/%D1%88%D0%BB%D1%8F%D1%85')
+
+Regular properties are *percent-decoded*, use ``raw_`` versions for
+getting *encoded* strings:
+
+.. code-block:: pycon
+
+ >>> url.path
+ '/шлях'
+
+ >>> url.raw_path
+ '/%D1%88%D0%BB%D1%8F%D1%85'
+
+Human readable representation of URL is available as ``.human_repr()``:
+
+.. code-block:: pycon
+
+ >>> url.human_repr()
+ 'https://www.python.org/шлях'
+
+For full documentation please read https://yarl.aio-libs.org.
+
+
+Installation
+------------
+
+::
+
+ $ pip install yarl
+
+The library is Python 3 only!
+
+PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
+``yarl`` on another operating system where wheels are not provided,
+the tarball will be used to compile the library from
+the source code. It requires a C compiler and Python headers installed.
+
+To skip the compilation you must explicitly opt-in by using a PEP 517
+configuration setting ``pure-python``, or setting the ``YARL_NO_EXTENSIONS``
+environment variable to a non-empty value, e.g.:
+
+.. code-block:: console
+
+ $ pip install yarl --config-settings=pure-python=false
+
+Please note that the pure-Python (uncompiled) version is much slower. However,
+PyPy always uses a pure-Python implementation, and, as such, it is unaffected
+by this variable.
+
+Dependencies
+------------
+
+YARL requires multidict_ and propcache_ libraries.
+
+
+API documentation
+------------------
+
+The documentation is located at https://yarl.aio-libs.org.
+
+
+Why isn't boolean supported by the URL query API?
+-------------------------------------------------
+
+There is no standard for boolean representation of boolean values.
+
+Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``,
+``Y``/``N``, ``1``/``0``, etc.
+
+``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because
+it is specific to how the end-user's application is built and would be different for
+different apps. The library doesn't accept booleans in the API; a user should convert
+bools into strings using own preferred translation protocol.
+
+
+Comparison with other URL libraries
+------------------------------------
+
+* furl (https://pypi.python.org/pypi/furl)
+
+ The library has rich functionality but the ``furl`` object is mutable.
+
+ I'm afraid to pass this object into foreign code: who knows if the
+ code will modify my url in a terrible way while I just want to send URL
+ with handy helpers for accessing URL properties.
+
+ ``furl`` has other non-obvious tricky things but the main objection
+ is mutability.
+
+* URLObject (https://pypi.python.org/pypi/URLObject)
+
+ URLObject is immutable, that's pretty good.
+
+ Every URL change generates a new URL object.
+
+ But the library doesn't do any decode/encode transformations leaving the
+ end user to cope with these gory details.
+
+
+Source code
+-----------
+
+The project is hosted on GitHub_
+
+Please file an issue on the `bug tracker
+<https://github.com/aio-libs/yarl/issues>`_ if you have found a bug
+or have some suggestion in order to improve the library.
+
+Discussion list
+---------------
+
+*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
+
+Feel free to post your questions and ideas here.
+
+
+Authors and License
+-------------------
+
+The ``yarl`` package is written by Andrew Svetlov.
+
+It's *Apache 2* licensed and freely available.
+
+
+.. _GitHub: https://github.com/aio-libs/yarl
+
+.. _multidict: https://github.com/aio-libs/multidict
+
+.. _propcache: https://github.com/aio-libs/propcache
+
+=========
+Changelog
+=========
+
+..
+ You should *NOT* be adding new change log entries to this file, this
+ file is managed by towncrier. You *may* edit previous change logs to
+ fix problems like typo corrections or such.
+ To add a new change log entry, please see
+ https://pip.pypa.io/en/latest/development/#adding-a-news-entry
+ we named the news folder "changes".
+
+ WARNING: Don't drop the next directive!
+
+.. towncrier release notes start
+
+1.22.0
+======
+
+*(2025-10-05)*
+
+
+Features
+--------
+
+- Added arm64 Windows wheel builds
+ -- by `@finnagin <https://github.com/sponsors/finnagin>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1516 <https://github.com/aio-libs/yarl/issues/1516>`__.
+
+
+----
+
+
+1.21.0
+======
+
+*(2025-10-05)*
+
+
+Contributor-facing changes
+--------------------------
+
+- The ``reusable-cibuildwheel.yml`` workflow has been refactored to
+ be more generic and ``ci-cd.yml`` now holds all the configuration
+ toggles -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1535 <https://github.com/aio-libs/yarl/issues/1535>`__.
+
+- When building wheels, the source distribution is now passed directly
+ to the ``cibuildwheel`` invocation -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1536 <https://github.com/aio-libs/yarl/issues/1536>`__.
+
+- Added CI for Python 3.14 -- by `@kumaraditya303 <https://github.com/sponsors/kumaraditya303>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1560 <https://github.com/aio-libs/yarl/issues/1560>`__.
+
+
+----
+
+
+1.20.1
+======
+
+*(2025-06-09)*
+
+
+Bug fixes
+---------
+
+- Started raising a ``ValueError`` exception for corrupted
+  IPv6 URL values.
+
+  This fixes the issue where exception ``IndexError`` was
+ leaking from the internal code because of not being handled and
+ transformed into a user-facing error. The problem was happening
+ under the following conditions: empty IPv6 URL, brackets in
+ reverse order.
+
+ -- by `@MaelPic <https://github.com/sponsors/MaelPic>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1512 <https://github.com/aio-libs/yarl/issues/1512>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Updated to use Cython 3.1 universally across the build path -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1514 <https://github.com/aio-libs/yarl/issues/1514>`__.
+
+- Made Cython line tracing opt-in via the ``with-cython-tracing`` build config setting -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ Previously, line tracing was enabled by default in ``pyproject.toml``, which caused build issues for some users and made wheels nearly twice as slow.
+ Now line tracing is only enabled when explicitly requested via ``pip install . --config-setting=with-cython-tracing=true`` or by setting the ``YARL_CYTHON_TRACING`` environment variable.
+
+ *Related issues and pull requests on GitHub:*
+ `#1521 <https://github.com/aio-libs/yarl/issues/1521>`__.
+
+
+----
+
+
+1.20.0
+======
+
+*(2025-04-16)*
+
+
+Features
+--------
+
+- Implemented support for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1456 <https://github.com/aio-libs/yarl/issues/1456>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Started building wheels for the free-threaded build of CPython 3.13 -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1456 <https://github.com/aio-libs/yarl/issues/1456>`__.
+
+
+----
+
+
+1.19.0
+======
+
+*(2025-04-05)*
+
+
+Bug fixes
+---------
+
+- Fixed entire name being re-encoded when using ``yarl.URL.with_suffix()`` -- by `@NTFSvolume <https://github.com/sponsors/NTFSvolume>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1468 <https://github.com/aio-libs/yarl/issues/1468>`__.
+
+
+Features
+--------
+
+- Started building armv7l wheels for manylinux -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1495 <https://github.com/aio-libs/yarl/issues/1495>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- GitHub Actions CI/CD is now configured to manage caching pip-ecosystem
+ dependencies using `re-actors/cache-python-deps`_ -- an action by
+ `@webknjaz <https://github.com/sponsors/webknjaz>`__ that takes into account ABI stability and the exact
+ version of Python runtime.
+
+ .. _`re-actors/cache-python-deps`:
+ https://github.com/marketplace/actions/cache-python-deps
+
+ *Related issues and pull requests on GitHub:*
+ `#1471 <https://github.com/aio-libs/yarl/issues/1471>`__.
+
+- Increased minimum `propcache`_ version to 0.2.1 to fix failing tests -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ .. _`propcache`:
+ https://github.com/aio-libs/propcache
+
+ *Related issues and pull requests on GitHub:*
+ `#1479 <https://github.com/aio-libs/yarl/issues/1479>`__.
+
+- Added all hidden folders to pytest's ``norecursedirs`` to prevent it
+ from trying to collect tests there -- by `@lysnikolaou <https://github.com/sponsors/lysnikolaou>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1480 <https://github.com/aio-libs/yarl/issues/1480>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved accuracy of type annotations -- by `@Dreamsorcerer <https://github.com/sponsors/Dreamsorcerer>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1484 <https://github.com/aio-libs/yarl/issues/1484>`__.
+
+- Improved performance of parsing query strings -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1493 <https://github.com/aio-libs/yarl/issues/1493>`__, `#1497 <https://github.com/aio-libs/yarl/issues/1497>`__.
+
+- Improved performance of the C unquoter -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1496 <https://github.com/aio-libs/yarl/issues/1496>`__, `#1498 <https://github.com/aio-libs/yarl/issues/1498>`__.
+
+
+----
+
+
+1.18.3
+======
+
+*(2024-12-01)*
+
+
+Bug fixes
+---------
+
+- Fixed uppercase ASCII hosts being rejected by ``URL.build()`` and ``yarl.URL.with_host()`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#954 <https://github.com/aio-libs/yarl/issues/954>`__, `#1442 <https://github.com/aio-libs/yarl/issues/1442>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of multiple path properties on cache miss -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1443 <https://github.com/aio-libs/yarl/issues/1443>`__.
+
+
+----
+
+
+1.18.2
+======
+
+*(2024-11-29)*
+
+
+No significant changes.
+
+
+----
+
+
+1.18.1
+======
+
+*(2024-11-29)*
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved cache performance when ``~yarl.URL`` objects are constructed from ``yarl.URL.build()`` with ``encoded=True`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1432 <https://github.com/aio-libs/yarl/issues/1432>`__.
+
+- Improved cache performance for operations that produce a new ``~yarl.URL`` object -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1434 <https://github.com/aio-libs/yarl/issues/1434>`__, `#1436 <https://github.com/aio-libs/yarl/issues/1436>`__.
+
+
+----
+
+
+1.18.0
+======
+
+*(2024-11-21)*
+
+
+Features
+--------
+
+- Added ``keep_query`` and ``keep_fragment`` flags in the ``yarl.URL.with_path()``, ``yarl.URL.with_name()`` and ``yarl.URL.with_suffix()`` methods, allowing users to optionally retain the query string and fragment in the resulting URL when replacing the path -- by `@paul-nameless <https://github.com/sponsors/paul-nameless>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#111 <https://github.com/aio-libs/yarl/issues/111>`__, `#1421 <https://github.com/aio-libs/yarl/issues/1421>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- Started running downstream ``aiohttp`` tests in CI -- by `@Cycloctane <https://github.com/sponsors/Cycloctane>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1415 <https://github.com/aio-libs/yarl/issues/1415>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of converting ``~yarl.URL`` to a string -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1422 <https://github.com/aio-libs/yarl/issues/1422>`__.
+
+
+----
+
+
+1.17.2
+======
+
+*(2024-11-17)*
+
+
+Bug fixes
+---------
+
+- Stopped implicitly allowing the use of Cython pre-release versions when
+ building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and
+ `@markgreene74 <https://github.com/sponsors/markgreene74>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1411 <https://github.com/aio-libs/yarl/issues/1411>`__, `#1412 <https://github.com/aio-libs/yarl/issues/1412>`__.
+
+- Fixed a bug causing ``~yarl.URL.port`` to return the default port when the given port was zero
+ -- by `@gmacon <https://github.com/sponsors/gmacon>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1413 <https://github.com/aio-libs/yarl/issues/1413>`__.
+
+
+Features
+--------
+
+- Make error messages include details of incorrect type when ``port`` is not int in ``yarl.URL.build()``.
+ -- by `@Cycloctane <https://github.com/sponsors/Cycloctane>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1414 <https://github.com/aio-libs/yarl/issues/1414>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Stopped implicitly allowing the use of Cython pre-release versions when
+ building the distribution package -- by `@ajsanchezsanz <https://github.com/sponsors/ajsanchezsanz>`__ and
+ `@markgreene74 <https://github.com/sponsors/markgreene74>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1411 <https://github.com/aio-libs/yarl/issues/1411>`__, `#1412 <https://github.com/aio-libs/yarl/issues/1412>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of the ``yarl.URL.joinpath()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1418 <https://github.com/aio-libs/yarl/issues/1418>`__.
+
+
+----
+
+
+1.17.1
+======
+
+*(2024-10-30)*
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of many ``~yarl.URL`` methods -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1396 <https://github.com/aio-libs/yarl/issues/1396>`__, `#1397 <https://github.com/aio-libs/yarl/issues/1397>`__, `#1398 <https://github.com/aio-libs/yarl/issues/1398>`__.
+
+- Improved performance of passing a `dict` or `str` to ``yarl.URL.extend_query()`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1401 <https://github.com/aio-libs/yarl/issues/1401>`__.
+
+
+----
+
+
+1.17.0
+======
+
+*(2024-10-28)*
+
+
+Features
+--------
+
+- Added ``~yarl.URL.host_port_subcomponent`` which returns the RFC 3986 section 3.2.2 host and RFC 3986 section 3.2.3 port subcomponent -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1375 <https://github.com/aio-libs/yarl/issues/1375>`__.
+
+
+----
+
+
+1.16.0
+======
+
+*(2024-10-21)*
+
+
+Bug fixes
+---------
+
+- Fixed blocking I/O to load Python code when creating a new ``~yarl.URL`` with non-ascii characters in the network location part -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1342 <https://github.com/aio-libs/yarl/issues/1342>`__.
+
+
+Removals and backward incompatible breaking changes
+---------------------------------------------------
+
+- Migrated to using a single cache for encoding hosts -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ Passing ``ip_address_size`` and ``host_validate_size`` to ``yarl.cache_configure()`` is deprecated in favor of the new ``encode_host_size`` parameter and will be removed in a future release. For backwards compatibility, the old parameters affect the ``encode_host`` cache size.
+
+ *Related issues and pull requests on GitHub:*
+ `#1348 <https://github.com/aio-libs/yarl/issues/1348>`__, `#1357 <https://github.com/aio-libs/yarl/issues/1357>`__, `#1363 <https://github.com/aio-libs/yarl/issues/1363>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of constructing ``~yarl.URL`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1336 <https://github.com/aio-libs/yarl/issues/1336>`__.
+
+- Improved performance of calling ``yarl.URL.build()`` and constructing unencoded ``~yarl.URL`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1345 <https://github.com/aio-libs/yarl/issues/1345>`__.
+
+- Reworked the internal encoding cache to improve performance on cache hit -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1369 <https://github.com/aio-libs/yarl/issues/1369>`__.
+
+
+----
+
+
+1.15.5
+======
+
+*(2024-10-18)*
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of the ``yarl.URL.joinpath()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1304 <https://github.com/aio-libs/yarl/issues/1304>`__.
+
+- Improved performance of the ``yarl.URL.extend_query()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1305 <https://github.com/aio-libs/yarl/issues/1305>`__.
+
+- Improved performance of the ``yarl.URL.origin()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1306 <https://github.com/aio-libs/yarl/issues/1306>`__.
+
+- Improved performance of the ``yarl.URL.with_path()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1307 <https://github.com/aio-libs/yarl/issues/1307>`__.
+
+- Improved performance of the ``yarl.URL.with_query()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1308 <https://github.com/aio-libs/yarl/issues/1308>`__, `#1328 <https://github.com/aio-libs/yarl/issues/1328>`__.
+
+- Improved performance of the ``yarl.URL.update_query()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1309 <https://github.com/aio-libs/yarl/issues/1309>`__, `#1327 <https://github.com/aio-libs/yarl/issues/1327>`__.
+
+- Improved performance of the ``yarl.URL.join()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1313 <https://github.com/aio-libs/yarl/issues/1313>`__.
+
+- Improved performance of ``~yarl.URL`` equality checks -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1315 <https://github.com/aio-libs/yarl/issues/1315>`__.
+
+- Improved performance of ``~yarl.URL`` methods that modify the network location -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1316 <https://github.com/aio-libs/yarl/issues/1316>`__.
+
+- Improved performance of the ``yarl.URL.with_fragment()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1317 <https://github.com/aio-libs/yarl/issues/1317>`__.
+
+- Improved performance of calculating the hash of ``~yarl.URL`` objects -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1318 <https://github.com/aio-libs/yarl/issues/1318>`__.
+
+- Improved performance of the ``yarl.URL.relative()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1319 <https://github.com/aio-libs/yarl/issues/1319>`__.
+
+- Improved performance of the ``yarl.URL.with_name()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1320 <https://github.com/aio-libs/yarl/issues/1320>`__.
+
+- Improved performance of ``~yarl.URL.parent`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1321 <https://github.com/aio-libs/yarl/issues/1321>`__.
+
+- Improved performance of the ``yarl.URL.with_scheme()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1322 <https://github.com/aio-libs/yarl/issues/1322>`__.
+
+
+----
+
+
+1.15.4
+======
+
+*(2024-10-16)*
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of the quoter when all characters are safe -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1288 <https://github.com/aio-libs/yarl/issues/1288>`__.
+
+- Improved performance of unquoting strings -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1292 <https://github.com/aio-libs/yarl/issues/1292>`__, `#1293 <https://github.com/aio-libs/yarl/issues/1293>`__.
+
+- Improved performance of calling ``yarl.URL.build()`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1297 <https://github.com/aio-libs/yarl/issues/1297>`__.
+
+
+----
+
+
+1.15.3
+======
+
+*(2024-10-15)*
+
+
+Bug fixes
+---------
+
+- Fixed ``yarl.URL.build()`` failing to validate paths must start with a ``/`` when passing ``authority`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ The validation only worked correctly when passing ``host``.
+
+ *Related issues and pull requests on GitHub:*
+ `#1265 <https://github.com/aio-libs/yarl/issues/1265>`__.
+
+
+Removals and backward incompatible breaking changes
+---------------------------------------------------
+
+- Removed support for Python 3.8 as it has reached end of life -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1203 <https://github.com/aio-libs/yarl/issues/1203>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of constructing ``~yarl.URL`` when the net location is only the host -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1271 <https://github.com/aio-libs/yarl/issues/1271>`__.
+
+
+----
+
+
+1.15.2
+======
+
+*(2024-10-13)*
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of converting ``~yarl.URL`` to a string -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1234 <https://github.com/aio-libs/yarl/issues/1234>`__.
+
+- Improved performance of ``yarl.URL.joinpath()`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1248 <https://github.com/aio-libs/yarl/issues/1248>`__, `#1250 <https://github.com/aio-libs/yarl/issues/1250>`__.
+
+- Improved performance of constructing query strings from ``~multidict.MultiDict`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1256 <https://github.com/aio-libs/yarl/issues/1256>`__.
+
+- Improved performance of constructing query strings with ``int`` values -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1259 <https://github.com/aio-libs/yarl/issues/1259>`__.
+
+
+----
+
+
+1.15.1
+======
+
+*(2024-10-12)*
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of calling ``yarl.URL.build()`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1222 <https://github.com/aio-libs/yarl/issues/1222>`__.
+
+- Improved performance of all ``~yarl.URL`` methods that create new ``~yarl.URL`` objects -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1226 <https://github.com/aio-libs/yarl/issues/1226>`__.
+
+- Improved performance of ``~yarl.URL`` methods that modify the network location -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1229 <https://github.com/aio-libs/yarl/issues/1229>`__.
+
+
+----
+
+
+1.15.0
+======
+
+*(2024-10-11)*
+
+
+Bug fixes
+---------
+
+- Fixed validation with ``yarl.URL.with_scheme()`` when passed scheme is not lowercase -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1189 <https://github.com/aio-libs/yarl/issues/1189>`__.
+
+
+Features
+--------
+
+- Started building ``armv7l`` wheels -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1204 <https://github.com/aio-libs/yarl/issues/1204>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of constructing unencoded ``~yarl.URL`` objects -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1188 <https://github.com/aio-libs/yarl/issues/1188>`__.
+
+- Added a cache for parsing hosts to reduce overhead of encoding ``~yarl.URL`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1190 <https://github.com/aio-libs/yarl/issues/1190>`__.
+
+- Improved performance of constructing query strings from ``~collections.abc.Mapping`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1193 <https://github.com/aio-libs/yarl/issues/1193>`__.
+
+- Improved performance of converting ``~yarl.URL`` objects to strings -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1198 <https://github.com/aio-libs/yarl/issues/1198>`__.
+
+
+----
+
+
+1.14.0
+======
+
+*(2024-10-08)*
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Switched to using the ``propcache`` package for property caching
+ -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ The ``propcache`` package is derived from the property caching
+ code in ``yarl`` and has been broken out to avoid maintaining it for multiple
+ projects.
+
+ *Related issues and pull requests on GitHub:*
+ `#1169 <https://github.com/aio-libs/yarl/issues/1169>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- Started testing with Hypothesis -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__ and `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ Special thanks to `@Zac-HD <https://github.com/sponsors/Zac-HD>`__ for helping us get started with this framework.
+
+ *Related issues and pull requests on GitHub:*
+ `#860 <https://github.com/aio-libs/yarl/issues/860>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of ``yarl.URL.is_default_port()`` when no explicit port is set -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1168 <https://github.com/aio-libs/yarl/issues/1168>`__.
+
+- Improved performance of converting ``~yarl.URL`` to a string when no explicit port is set -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1170 <https://github.com/aio-libs/yarl/issues/1170>`__.
+
+- Improved performance of the ``yarl.URL.origin()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1175 <https://github.com/aio-libs/yarl/issues/1175>`__.
+
+- Improved performance of encoding hosts -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1176 <https://github.com/aio-libs/yarl/issues/1176>`__.
+
+
+----
+
+
+1.13.1
+======
+
+*(2024-09-27)*
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of calling ``yarl.URL.build()`` with ``authority`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1163 <https://github.com/aio-libs/yarl/issues/1163>`__.
+
+
+----
+
+
+1.13.0
+======
+
+*(2024-09-26)*
+
+
+Bug fixes
+---------
+
+- Started rejecting ASCII hostnames with invalid characters. For host strings that
+ look like authority strings, the exception message includes advice on what to do
+ instead -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#880 <https://github.com/aio-libs/yarl/issues/880>`__, `#954 <https://github.com/aio-libs/yarl/issues/954>`__.
+
+- Fixed IPv6 addresses missing brackets when the ``~yarl.URL`` was converted to a string -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1157 <https://github.com/aio-libs/yarl/issues/1157>`__, `#1158 <https://github.com/aio-libs/yarl/issues/1158>`__.
+
+
+Features
+--------
+
+- Added ``~yarl.URL.host_subcomponent`` which returns the RFC 3986 section 3.2.2 host subcomponent -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ The only current practical difference between ``~yarl.URL.raw_host`` and ``~yarl.URL.host_subcomponent`` is that IPv6 addresses are returned bracketed.
+
+ *Related issues and pull requests on GitHub:*
+ `#1159 <https://github.com/aio-libs/yarl/issues/1159>`__.
+
+
+----
+
+
+1.12.1
+======
+
+*(2024-09-23)*
+
+
+No significant changes.
+
+
+----
+
+
+1.12.0
+======
+
+*(2024-09-23)*
+
+
+Features
+--------
+
+- Added ``~yarl.URL.path_safe`` to be able to fetch the path without ``%2F`` and ``%25`` decoded -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1150 <https://github.com/aio-libs/yarl/issues/1150>`__.
+
+
+Removals and backward incompatible breaking changes
+---------------------------------------------------
+
+- Restore decoding ``%2F`` (``/``) in ``URL.path`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ This change restored the behavior before `#1057 <https://github.com/aio-libs/yarl/issues/1057>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1151 <https://github.com/aio-libs/yarl/issues/1151>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of processing paths -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1143 <https://github.com/aio-libs/yarl/issues/1143>`__.
+
+
+----
+
+
+1.11.1
+======
+
+*(2024-09-09)*
+
+
+Bug fixes
+---------
+
+- Allowed scheme replacement for relative URLs if the scheme does not require a host -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#280 <https://github.com/aio-libs/yarl/issues/280>`__, `#1138 <https://github.com/aio-libs/yarl/issues/1138>`__.
+
+- Allowed empty host for URL schemes other than the special schemes listed in the WHATWG URL spec -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1136 <https://github.com/aio-libs/yarl/issues/1136>`__.
+
+
+Features
+--------
+
+- Loosened restriction on integers as query string values to allow classes that implement ``__int__`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1139 <https://github.com/aio-libs/yarl/issues/1139>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of normalizing paths -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1137 <https://github.com/aio-libs/yarl/issues/1137>`__.
+
+
+----
+
+
+1.11.0
+======
+
+*(2024-09-08)*
+
+
+Features
+--------
+
+- Added ``URL.extend_query()`` method, which can be used to extend parameters without replacing same named keys -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ This method was primarily added to replace the inefficient hand rolled method currently used in ``aiohttp``.
+
+ *Related issues and pull requests on GitHub:*
+ `#1128 <https://github.com/aio-libs/yarl/issues/1128>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of the Cython ``cached_property`` implementation -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1122 <https://github.com/aio-libs/yarl/issues/1122>`__.
+
+- Simplified computing ports by removing unnecessary code -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1123 <https://github.com/aio-libs/yarl/issues/1123>`__.
+
+- Improved performance of encoding non IPv6 hosts -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1125 <https://github.com/aio-libs/yarl/issues/1125>`__.
+
+- Improved performance of ``URL.build()`` when the path, query string, or fragment is an empty string -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1126 <https://github.com/aio-libs/yarl/issues/1126>`__.
+
+- Improved performance of the ``URL.update_query()`` method -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1130 <https://github.com/aio-libs/yarl/issues/1130>`__.
+
+- Improved performance of processing query string changes when arguments are ``str`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1131 <https://github.com/aio-libs/yarl/issues/1131>`__.
+
+
+----
+
+
+1.10.0
+======
+
+*(2024-09-06)*
+
+
+Bug fixes
+---------
+
+- Fixed joining a path when the existing path was empty -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+  A regression in ``URL.join()`` was introduced in `#1082 <https://github.com/aio-libs/yarl/issues/1082>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1118 <https://github.com/aio-libs/yarl/issues/1118>`__.
+
+
+Features
+--------
+
+- Added ``URL.without_query_params()`` method, to drop some parameters from query string -- by `@hongquan <https://github.com/sponsors/hongquan>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#774 <https://github.com/aio-libs/yarl/issues/774>`__, `#898 <https://github.com/aio-libs/yarl/issues/898>`__, `#1010 <https://github.com/aio-libs/yarl/issues/1010>`__.
+
+- The previously protected types ``_SimpleQuery``, ``_QueryVariable``, and ``_Query`` are now available for use externally as ``SimpleQuery``, ``QueryVariable``, and ``Query`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1050 <https://github.com/aio-libs/yarl/issues/1050>`__, `#1113 <https://github.com/aio-libs/yarl/issues/1113>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- Replaced all ``~typing.Optional`` with ``~typing.Union`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1095 <https://github.com/aio-libs/yarl/issues/1095>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Significantly improved performance of parsing the network location -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1112 <https://github.com/aio-libs/yarl/issues/1112>`__.
+
+- Added internal types to the cache to prevent future refactoring errors -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1117 <https://github.com/aio-libs/yarl/issues/1117>`__.
+
+
+----
+
+
+1.9.11
+======
+
+*(2024-09-04)*
+
+
+Bug fixes
+---------
+
+- Fixed a ``TypeError`` with ``MultiDictProxy`` and Python 3.8 -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1084 <https://github.com/aio-libs/yarl/issues/1084>`__, `#1105 <https://github.com/aio-libs/yarl/issues/1105>`__, `#1107 <https://github.com/aio-libs/yarl/issues/1107>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of encoding hosts -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+  Previously, the library would unconditionally try to parse a host as an IP Address. The library now avoids trying to parse a host as an IP Address if the string is not in one of the formats described in RFC 3986 section 3.2.2.
+
+ *Related issues and pull requests on GitHub:*
+ `#1104 <https://github.com/aio-libs/yarl/issues/1104>`__.
+
+
+----
+
+
+1.9.10
+======
+
+*(2024-09-04)*
+
+
+Bug fixes
+---------
+
+- ``URL.join()`` has been changed to match
+  RFC 3986 and align with
+  the ``/`` operation and ``URL.joinpath()``
+  when joining URLs with empty segments.
+ Previously ``urllib.parse.urljoin`` was used,
+ which has known issues with empty segments
+ (`python/cpython#84774 <https://github.com/python/cpython/issues/84774>`_).
+
+  Due to the semantics of ``URL.join()``, joining an
+ URL with scheme requires making it relative, prefixing with ``./``.
+
+ .. code-block:: pycon
+
+ >>> URL("https://web.archive.org/web/").join(URL("./https://github.com/aio-libs/yarl"))
+ URL('https://web.archive.org/web/https://github.com/aio-libs/yarl')
+
+
+ Empty segments are honored in the base as well as the joined part.
+
+ .. code-block:: pycon
+
+ >>> URL("https://web.archive.org/web/https://").join(URL("github.com/aio-libs/yarl"))
+ URL('https://web.archive.org/web/https://github.com/aio-libs/yarl')
+
+
+
+ -- by `@commonism <https://github.com/sponsors/commonism>`__
+
+ This change initially appeared in 1.9.5 but was reverted in 1.9.6 to resolve a problem with query string handling.
+
+ *Related issues and pull requests on GitHub:*
+ `#1039 <https://github.com/aio-libs/yarl/issues/1039>`__, `#1082 <https://github.com/aio-libs/yarl/issues/1082>`__.
+
+
+Features
+--------
+
+- Added ``~yarl.URL.absolute`` which is now preferred over ``URL.is_absolute()`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1100 <https://github.com/aio-libs/yarl/issues/1100>`__.
+
+
+----
+
+
+1.9.9
+=====
+
+*(2024-09-04)*
+
+
+Bug fixes
+---------
+
+- Added missing type on ``~yarl.URL.port`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1097 <https://github.com/aio-libs/yarl/issues/1097>`__.
+
+
+----
+
+
+1.9.8
+=====
+
+*(2024-09-03)*
+
+
+Features
+--------
+
+- Covered the ``~yarl.URL`` object with types -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1084 <https://github.com/aio-libs/yarl/issues/1084>`__.
+
+- Cache parsing of IP Addresses when encoding hosts -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1086 <https://github.com/aio-libs/yarl/issues/1086>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- Covered the ``~yarl.URL`` object with types -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1084 <https://github.com/aio-libs/yarl/issues/1084>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of handling ports -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1081 <https://github.com/aio-libs/yarl/issues/1081>`__.
+
+
+----
+
+
+1.9.7
+=====
+
+*(2024-09-01)*
+
+
+Removals and backward incompatible breaking changes
+---------------------------------------------------
+
+- Removed support for RFC 3986, section 3.2.3 port normalization when the scheme is not one of ``http``, ``https``, ``wss``, or ``ws`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ Support for port normalization was recently added in `#1033 <https://github.com/aio-libs/yarl/issues/1033>`__ and contained code that would do blocking I/O if the scheme was not one of the four listed above. The code has been removed because this library is intended to be safe for usage with ``asyncio``.
+
+ *Related issues and pull requests on GitHub:*
+ `#1076 <https://github.com/aio-libs/yarl/issues/1076>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- Improved performance of property caching -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ The ``reify`` implementation from ``aiohttp`` was adapted to replace the internal ``cached_property`` implementation.
+
+ *Related issues and pull requests on GitHub:*
+ `#1070 <https://github.com/aio-libs/yarl/issues/1070>`__.
+
+
+----
+
+
+1.9.6
+=====
+
+*(2024-08-30)*
+
+
+Bug fixes
+---------
+
+- Reverted RFC 3986-compatible ``URL.join()`` honoring empty segments which was introduced in `#1039 <https://github.com/aio-libs/yarl/issues/1039>`__.
+
+ This change introduced a regression handling query string parameters with joined URLs. The change was reverted to maintain compatibility with the previous behavior.
+
+ *Related issues and pull requests on GitHub:*
+ `#1067 <https://github.com/aio-libs/yarl/issues/1067>`__.
+
+
+----
+
+
+1.9.5
+=====
+
+*(2024-08-30)*
+
+
+Bug fixes
+---------
+
+- Joining URLs with empty segments has been changed
+  to match RFC 3986.
+
+ Previously empty segments would be removed from path,
+ breaking use-cases such as
+
+ .. code-block:: python
+
+ URL("https://web.archive.org/web/") / "https://github.com/"
+
+  Now the ``/`` operation and ``URL.joinpath()``
+  keep empty segments, but do not introduce new empty segments.
+ e.g.
+
+ .. code-block:: python
+
+ URL("https://example.org/") / ""
+
+ does not introduce an empty segment.
+
+ -- by `@commonism <https://github.com/sponsors/commonism>`__ and `@youtux <https://github.com/sponsors/youtux>`__
+
+ *Related issues and pull requests on GitHub:*
+ `#1026 <https://github.com/aio-libs/yarl/issues/1026>`__.
+
+- The default protocol ports of well-known URI schemes are now taken into account
+  during the normalization of the URL string representation in accordance with
+  RFC 3986, section 3.2.3.
+
+ Specified ports are removed from the ``str`` representation of a ``~yarl.URL``
+ if the port matches the scheme's default port -- by `@commonism <https://github.com/sponsors/commonism>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1033 <https://github.com/aio-libs/yarl/issues/1033>`__.
+
+- ``URL.join()`` has been changed to match
+  RFC 3986 and align with
+  the ``/`` operation and ``URL.joinpath()``
+  when joining URLs with empty segments.
+ Previously ``urllib.parse.urljoin`` was used,
+ which has known issues with empty segments
+ (`python/cpython#84774 <https://github.com/python/cpython/issues/84774>`_).
+
+  Due to the semantics of ``URL.join()``, joining a
+  URL with scheme requires making it relative, prefixing with ``./``.
+
+ .. code-block:: pycon
+
+ >>> URL("https://web.archive.org/web/").join(URL("./https://github.com/aio-libs/yarl"))
+ URL('https://web.archive.org/web/https://github.com/aio-libs/yarl')
+
+
+ Empty segments are honored in the base as well as the joined part.
+
+ .. code-block:: pycon
+
+ >>> URL("https://web.archive.org/web/https://").join(URL("github.com/aio-libs/yarl"))
+ URL('https://web.archive.org/web/https://github.com/aio-libs/yarl')
+
+
+
+ -- by `@commonism <https://github.com/sponsors/commonism>`__
+
+ *Related issues and pull requests on GitHub:*
+ `#1039 <https://github.com/aio-libs/yarl/issues/1039>`__.
+
+
+Removals and backward incompatible breaking changes
+---------------------------------------------------
+
+- Stopped decoding ``%2F`` (``/``) in ``URL.path``, as this could lead to code incorrectly treating it as a path separator
+ -- by `@Dreamsorcerer <https://github.com/sponsors/Dreamsorcerer>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1057 <https://github.com/aio-libs/yarl/issues/1057>`__.
+
+- Dropped support for Python 3.7 -- by `@Dreamsorcerer <https://github.com/sponsors/Dreamsorcerer>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1016 <https://github.com/aio-libs/yarl/issues/1016>`__.
+
+
+Improved documentation
+----------------------
+
+- On the ``Contributing docs`` page,
+ a link to the ``Towncrier philosophy`` has been fixed.
+
+ *Related issues and pull requests on GitHub:*
+ `#981 <https://github.com/aio-libs/yarl/issues/981>`__.
+
+- The pre-existing ``/`` magic method
+  has been documented in the API reference -- by `@commonism <https://github.com/sponsors/commonism>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1026 <https://github.com/aio-libs/yarl/issues/1026>`__.
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- A flaw in the logic for copying the project directory into a
+  temporary folder led to infinite recursion when ``TMPDIR``
+  was set to a project subdirectory path. This was happening in Fedora
+ and its downstream due to the use of `pyproject-rpm-macros
+ <https://src.fedoraproject.org/rpms/pyproject-rpm-macros>`__. It was
+ only reproducible with ``pip wheel`` and was not affecting the
+ ``pyproject-build`` users.
+
+ -- by `@hroncok <https://github.com/sponsors/hroncok>`__ and `@webknjaz <https://github.com/sponsors/webknjaz>`__
+
+ *Related issues and pull requests on GitHub:*
+ `#992 <https://github.com/aio-libs/yarl/issues/992>`__, `#1014 <https://github.com/aio-libs/yarl/issues/1014>`__.
+
+- Support Python 3.13 and publish non-free-threaded wheels
+
+ *Related issues and pull requests on GitHub:*
+ `#1054 <https://github.com/aio-libs/yarl/issues/1054>`__.
+
+
+Contributor-facing changes
+--------------------------
+
+- The CI/CD setup has been updated to test ``arm64`` wheels
+ under macOS 14, except for Python 3.7 that is unsupported
+ in that environment -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1015 <https://github.com/aio-libs/yarl/issues/1015>`__.
+
+- Removed unused type ignores and casts -- by `@hauntsaninja <https://github.com/sponsors/hauntsaninja>`__.
+
+ *Related issues and pull requests on GitHub:*
+ `#1031 <https://github.com/aio-libs/yarl/issues/1031>`__.
+
+
+Miscellaneous internal changes
+------------------------------
+
+- ``port``, ``scheme``, and ``raw_host`` are now ``cached_property`` -- by `@bdraco <https://github.com/sponsors/bdraco>`__.
+
+ ``aiohttp`` accesses these properties quite often, which cause ``urllib`` to build the ``_hostinfo`` property every time. ``port``, ``scheme``, and ``raw_host`` are now cached properties, which will improve performance.
+
+ *Related issues and pull requests on GitHub:*
+ `#1044 <https://github.com/aio-libs/yarl/issues/1044>`__, `#1058 <https://github.com/aio-libs/yarl/issues/1058>`__.
+
+
+----
+
+
+1.9.4 (2023-12-06)
+==================
+
+Bug fixes
+---------
+
+- Started raising ``TypeError`` when a string value is passed into
+ ``yarl.URL.build()`` as the ``port`` argument -- by `@commonism <https://github.com/sponsors/commonism>`__.
+
+ Previously the empty string as port would create malformed URLs when rendered as string representations. (`#883 <https://github.com/aio-libs/yarl/issues/883>`__)
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- The leading ``--`` has been dropped from the `PEP 517 <https://peps.python.org/pep-517>`__ in-tree build
+ backend config setting names. ``--pure-python`` is now just ``pure-python``
+ -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ The usage now looks as follows:
+
+ .. code-block:: console
+
+ $ python -m build \
+ --config-setting=pure-python=true \
+ --config-setting=with-cython-tracing=true
+
+ (`#963 <https://github.com/aio-libs/yarl/issues/963>`__)
+
+
+Contributor-facing changes
+--------------------------
+
+- A step-by-step ``Release Guide`` guide has
+ been added, describing how to release *yarl* -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ This is primarily targeting maintainers. (`#960 <https://github.com/aio-libs/yarl/issues/960>`__)
+- Coverage collection has been implemented for the Cython modules
+ -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ It will also be reported to Codecov from any non-release CI jobs.
+
+ To measure coverage in a development environment, *yarl* can be
+ installed in editable mode:
+
+ .. code-block:: console
+
+ $ python -Im pip install -e .
+
+ Editable install produces C-files required for the Cython coverage
+ plugin to map the measurements back to the PYX-files.
+
+ `#961 <https://github.com/aio-libs/yarl/issues/961>`__
+
+- It is now possible to request line tracing in Cython builds using the
+ ``with-cython-tracing`` `PEP 517 <https://peps.python.org/pep-517>`__ config setting
+ -- `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ This can be used in CI and development environment to measure coverage
+ on Cython modules, but is not normally useful to the end-users or
+ downstream packagers.
+
+ Here's a usage example:
+
+ .. code-block:: console
+
+ $ python -Im pip install . --config-settings=with-cython-tracing=true
+
+ For editable installs, this setting is on by default. Otherwise, it's
+ off unless requested explicitly.
+
+ The following produces C-files required for the Cython coverage
+ plugin to map the measurements back to the PYX-files:
+
+ .. code-block:: console
+
+ $ python -Im pip install -e .
+
+ Alternatively, the ``YARL_CYTHON_TRACING=1`` environment variable
+ can be set to do the same as the `PEP 517 <https://peps.python.org/pep-517>`__ config setting.
+
+ `#962 <https://github.com/aio-libs/yarl/issues/962>`__
+
+
+1.9.3 (2023-11-20)
+==================
+
+Bug fixes
+---------
+
+- Stopped dropping trailing slashes in ``yarl.URL.joinpath()`` -- by `@gmacon <https://github.com/sponsors/gmacon>`__. (`#862 <https://github.com/aio-libs/yarl/issues/862>`__, `#866 <https://github.com/aio-libs/yarl/issues/866>`__)
+- Started accepting string subclasses in ``yarl.URL.__truediv__()`` operations (``URL / segment``) -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#871 <https://github.com/aio-libs/yarl/issues/871>`__, `#884 <https://github.com/aio-libs/yarl/issues/884>`__)
+- Fixed the human representation of URLs with square brackets in usernames and passwords -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#876 <https://github.com/aio-libs/yarl/issues/876>`__, `#882 <https://github.com/aio-libs/yarl/issues/882>`__)
+- Updated type hints to include ``URL.missing_port()``, ``URL.__bytes__()``
+ and the ``encoding`` argument to ``yarl.URL.joinpath()``
+ -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#891 <https://github.com/aio-libs/yarl/issues/891>`__)
+
+
+Packaging updates and notes for downstreams
+-------------------------------------------
+
+- Integrated Cython 3 to enable building *yarl* under Python 3.12 -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#829 <https://github.com/aio-libs/yarl/issues/829>`__, `#881 <https://github.com/aio-libs/yarl/issues/881>`__)
+- Declared modern ``setuptools.build_meta`` as the `PEP 517 <https://peps.python.org/pep-517>`__ build
+ backend in ``pyproject.toml`` explicitly -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__. (`#886 <https://github.com/aio-libs/yarl/issues/886>`__)
+- Converted most of the packaging setup into a declarative ``setup.cfg``
+ config -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__. (`#890 <https://github.com/aio-libs/yarl/issues/890>`__)
+- The packaging is replaced from an old-fashioned ``setup.py`` to an
+ in-tree `PEP 517 <https://peps.python.org/pep-517>`__ build backend -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ Whenever the end-users or downstream packagers need to build ``yarl`` from
+ source (a Git checkout or an sdist), they may pass a ``config_settings``
+ flag ``--pure-python``. If this flag is not set, a C-extension will be built
+ and included into the distribution.
+
+ Here is how this can be done with ``pip``:
+
+ .. code-block:: console
+
+ $ python -m pip install . --config-settings=--pure-python=false
+
+ This will also work with ``-e | --editable``.
+
+ The same can be achieved via ``pypa/build``:
+
+ .. code-block:: console
+
+ $ python -m build --config-setting=--pure-python=false
+
+ Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source
+ directly, as opposed to building an ``sdist`` and then building from it. (`#893 <https://github.com/aio-libs/yarl/issues/893>`__)
+
+ .. attention::
+
+ v1.9.3 was the only version using the ``--pure-python`` setting name.
+ Later versions dropped the ``--`` prefix, making it just ``pure-python``.
+
+- Declared Python 3.12 supported officially in the distribution package metadata
+ -- by `@edgarrmondragon <https://github.com/sponsors/edgarrmondragon>`__. (`#942 <https://github.com/aio-libs/yarl/issues/942>`__)
+
+
+Contributor-facing changes
+--------------------------
+
+- A regression test for no-host URLs was added per `#821 <https://github.com/aio-libs/yarl/issues/821>`__
+  and RFC 3986 -- by `@kenballus <https://github.com/sponsors/kenballus>`__. (`#821 <https://github.com/aio-libs/yarl/issues/821>`__, `#822 <https://github.com/aio-libs/yarl/issues/822>`__)
+- Started testing *yarl* against Python 3.12 in CI -- by `@mjpieters <https://github.com/sponsors/mjpieters>`__. (`#881 <https://github.com/aio-libs/yarl/issues/881>`__)
+- All Python 3.12 jobs are now marked as required to pass in CI
+ -- by `@edgarrmondragon <https://github.com/sponsors/edgarrmondragon>`__. (`#942 <https://github.com/aio-libs/yarl/issues/942>`__)
+- MyST is now integrated in Sphinx -- by `@webknjaz <https://github.com/sponsors/webknjaz>`__.
+
+ This allows the contributors to author new documents in Markdown
+ when they have difficulties with going straight RST. (`#953 <https://github.com/aio-libs/yarl/issues/953>`__)
+
+
+1.9.2 (2023-04-25)
+==================
+
+Bugfixes
+--------
+
+- Fix regression with ``yarl.URL.__truediv__()`` and absolute URLs with empty paths causing the raw path to lack the leading ``/``.
+ (`#854 <https://github.com/aio-libs/yarl/issues/854>`_)
+
+
+1.9.1 (2023-04-21)
+==================
+
+Bugfixes
+--------
+
+- Marked tests that fail on older Python patch releases (< 3.7.10, < 3.8.8 and < 3.9.2) as expected to fail due to missing a security fix for CVE-2021-23336. (`#850 <https://github.com/aio-libs/yarl/issues/850>`_)
+
+
+1.9.0 (2023-04-19)
+==================
+
+This release was never published to PyPI, due to issues with the build process.
+
+Features
+--------
+
+- Added ``URL.joinpath(*elements)``, to create a new URL appending multiple path elements. (`#704 <https://github.com/aio-libs/yarl/issues/704>`_)
+- Made ``URL.__truediv__()`` return ``NotImplemented`` if called with an
+ unsupported type — by `@michaeljpeters <https://github.com/sponsors/michaeljpeters>`__.
+ (`#832 <https://github.com/aio-libs/yarl/issues/832>`_)
+
+
+Bugfixes
+--------
+
+- Path normalization for absolute URLs no longer raises a ValueError exception
+ when ``..`` segments would otherwise go beyond the URL path root.
+ (`#536 <https://github.com/aio-libs/yarl/issues/536>`_)
+- Fixed an issue with update_query() not getting rid of the query when argument is None. (`#792 <https://github.com/aio-libs/yarl/issues/792>`_)
+- Added some input restrictions on with_port() function to prevent invalid boolean inputs or out of valid port inputs; handled incorrect 0 port representation. (`#793 <https://github.com/aio-libs/yarl/issues/793>`_)
+- Made ``yarl.URL.build()`` raise a ``TypeError`` if the ``host`` argument is ``None`` — by `@paulpapacz <https://github.com/sponsors/paulpapacz>`__. (`#808 <https://github.com/aio-libs/yarl/issues/808>`_)
+- Fixed an issue with ``update_query()`` getting rid of the query when the argument
+ is empty but not ``None``. (`#845 <https://github.com/aio-libs/yarl/issues/845>`_)
+
+
+Misc
+----
+
+- `#220 <https://github.com/aio-libs/yarl/issues/220>`_
+
+
+1.8.2 (2022-12-03)
+==================
+
+This is the first release that started shipping wheels for Python 3.11.
+
+
+1.8.1 (2022-08-01)
+==================
+
+Misc
+----
+
+- `#694 <https://github.com/aio-libs/yarl/issues/694>`_, `#699 <https://github.com/aio-libs/yarl/issues/699>`_, `#700 <https://github.com/aio-libs/yarl/issues/700>`_, `#701 <https://github.com/aio-libs/yarl/issues/701>`_, `#702 <https://github.com/aio-libs/yarl/issues/702>`_, `#703 <https://github.com/aio-libs/yarl/issues/703>`_, `#739 <https://github.com/aio-libs/yarl/issues/739>`_
+
+
+1.8.0 (2022-08-01)
+==================
+
+Features
+--------
+
+- Added ``URL.raw_suffix``, ``URL.suffix``, ``URL.raw_suffixes``, ``URL.suffixes``, ``URL.with_suffix``. (`#613 <https://github.com/aio-libs/yarl/issues/613>`_)
+
+
+Improved Documentation
+----------------------
+
+- Fixed broken internal references to ``yarl.URL.human_repr()``.
+ (`#665 <https://github.com/aio-libs/yarl/issues/665>`_)
+- Fixed broken external references to ``multidict:index`` docs. (`#665 <https://github.com/aio-libs/yarl/issues/665>`_)
+
+
+Deprecations and Removals
+-------------------------
+
+- Dropped Python 3.6 support. (`#672 <https://github.com/aio-libs/yarl/issues/672>`_)
+
+
+Misc
+----
+
+- `#646 <https://github.com/aio-libs/yarl/issues/646>`_, `#699 <https://github.com/aio-libs/yarl/issues/699>`_, `#701 <https://github.com/aio-libs/yarl/issues/701>`_
+
+
+1.7.2 (2021-11-01)
+==================
+
+Bugfixes
+--------
+
+- Changed call in ``with_port()`` to stop reencoding parts of the URL that were already encoded. (`#623 <https://github.com/aio-libs/yarl/issues/623>`_)
+
+
+1.7.1 (2021-10-07)
+==================
+
+Bugfixes
+--------
+
+- Fix 1.7.0 build error
+
+1.7.0 (2021-10-06)
+==================
+
+Features
+--------
+
+- Add ``__bytes__()`` magic method so that ``bytes(url)`` will work and use optimal ASCII encoding.
+ (`#582 <https://github.com/aio-libs/yarl/issues/582>`_)
+- Started shipping platform-specific arm64 wheels for Apple Silicon. (`#622 <https://github.com/aio-libs/yarl/issues/622>`_)
+- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. (`#622 <https://github.com/aio-libs/yarl/issues/622>`_)
+- Added support for Python 3.10. (`#622 <https://github.com/aio-libs/yarl/issues/622>`_)
+
+
+1.6.3 (2020-11-14)
+==================
+
+Bugfixes
+--------
+
+- No longer loose characters when decoding incorrect percent-sequences (like ``%e2%82%f8``). All non-decodable percent-sequences are now preserved.
+ `#517 <https://github.com/aio-libs/yarl/issues/517>`_
+- Provide x86 Windows wheels.
+ `#535 <https://github.com/aio-libs/yarl/issues/535>`_
+
+
+----
+
+
+1.6.2 (2020-10-12)
+==================
+
+
+Bugfixes
+--------
+
+- Provide generated ``.c`` files in TarBall distribution.
+ `#530 <https://github.com/aio-libs/multidict/issues/530>`_
+
+1.6.1 (2020-10-12)
+==================
+
+Features
+--------
+
+- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on
+ Linux as well as ``x86_64``.
+ `#507 <https://github.com/aio-libs/yarl/issues/507>`_
+- Provide wheels for Python 3.9.
+ `#526 <https://github.com/aio-libs/yarl/issues/526>`_
+
+Bugfixes
+--------
+
+- ``human_repr()`` now always produces valid representation equivalent to the original URL (if the original URL is valid).
+ `#511 <https://github.com/aio-libs/yarl/issues/511>`_
+- Fixed requoting a single percent followed by a percent-encoded character in the Cython implementation.
+ `#514 <https://github.com/aio-libs/yarl/issues/514>`_
+- Fix ValueError when decoding ``%`` which is not followed by two hexadecimal digits.
+ `#516 <https://github.com/aio-libs/yarl/issues/516>`_
+- Fix decoding ``%`` followed by a space and hexadecimal digit.
+ `#520 <https://github.com/aio-libs/yarl/issues/520>`_
+- Fix annotation of ``with_query()``/``update_query()`` methods for ``key=[val1, val2]`` case.
+ `#528 <https://github.com/aio-libs/yarl/issues/528>`_
+
+Removal
+-------
+
+- Drop Python 3.5 support; Python 3.6 is the minimal supported Python version.
+
+
+----
+
+
+1.6.0 (2020-09-23)
+==================
+
+Features
+--------
+
+- Allow for int and float subclasses in query, while still denying bool.
+ `#492 <https://github.com/aio-libs/yarl/issues/492>`_
+
+
+Bugfixes
+--------
+
+- Do not requote arguments in ``URL.build()``, ``with_xxx()`` and in ``/`` operator.
+ `#502 <https://github.com/aio-libs/yarl/issues/502>`_
+- Keep IPv6 brackets in ``origin()``.
+ `#504 <https://github.com/aio-libs/yarl/issues/504>`_
+
+
+----
+
+
+1.5.1 (2020-08-01)
+==================
+
+Bugfixes
+--------
+
+- Fix including relocated internal ``yarl._quoting_c`` C-extension into published PyPI dists.
+ `#485 <https://github.com/aio-libs/yarl/issues/485>`_
+
+
+Misc
+----
+
+- `#484 <https://github.com/aio-libs/yarl/issues/484>`_
+
+
+----
+
+
+1.5.0 (2020-07-26)
+==================
+
+Features
+--------
+
+- Convert host to lowercase on URL building.
+ `#386 <https://github.com/aio-libs/yarl/issues/386>`_
+- Allow using ``mod`` operator (``%``) for updating query string (an alias for ``update_query()`` method).
+ `#435 <https://github.com/aio-libs/yarl/issues/435>`_
+- Allow use of sequences such as ``list`` and ``tuple`` in the values
+ of a mapping such as ``dict`` to represent that a key has many values::
+
+ url = URL("http://example.com")
+ assert url.with_query({"a": [1, 2]}) == URL("http://example.com/?a=1&a=2")
+
+ `#443 <https://github.com/aio-libs/yarl/issues/443>`_
+- Support ``URL.build()`` with scheme and path (creates a relative URL).
+ `#464 <https://github.com/aio-libs/yarl/issues/464>`_
+- Cache slow IDNA encode/decode calls.
+ `#476 <https://github.com/aio-libs/yarl/issues/476>`_
+- Add ``@final`` / ``Final`` type hints
+ `#477 <https://github.com/aio-libs/yarl/issues/477>`_
+- Support URL authority/raw_authority properties and authority argument of ``URL.build()`` method.
+ `#478 <https://github.com/aio-libs/yarl/issues/478>`_
+- Hide the library implementation details, make the exposed public list very clean.
+ `#483 <https://github.com/aio-libs/yarl/issues/483>`_
+
+
+Bugfixes
+--------
+
+- Fix tests with newer Python (3.7.6, 3.8.1 and 3.9.0+).
+ `#409 <https://github.com/aio-libs/yarl/issues/409>`_
+- Fix a bug where query component, passed in a form of mapping or sequence, is unquoted in unexpected way.
+ `#426 <https://github.com/aio-libs/yarl/issues/426>`_
+- Hide ``Query`` and ``QueryVariable`` type aliases in ``__init__.pyi``, now they are prefixed with underscore.
+ `#431 <https://github.com/aio-libs/yarl/issues/431>`_
+- Keep IPv6 brackets after updating port/user/password.
+ `#451 <https://github.com/aio-libs/yarl/issues/451>`_
+
+
+----
+
+
+1.4.2 (2019-12-05)
+==================
+
+Features
+--------
+
+- Workaround for missing ``str.isascii()`` in Python 3.6
+ `#389 <https://github.com/aio-libs/yarl/issues/389>`_
+
+
+----
+
+
+1.4.1 (2019-11-29)
+==================
+
+* Fix regression, make the library work on Python 3.5 and 3.6 again.
+
+1.4.0 (2019-11-29)
+==================
+
+* Distinguish an empty password in URL from a password not provided at all (#262)
+
+* Fixed annotations for optional parameters of ``URL.build`` (#309)
+
+* Use None as default value of ``user`` parameter of ``URL.build`` (#309)
+
+* Enforce building C Accelerated modules when installing from source tarball, use
+ ``YARL_NO_EXTENSIONS`` environment variable for falling back to (slower) Pure Python
+ implementation (#329)
+
+* Drop Python 3.5 support
+
+* Fix quoting of plus in path by pure python version (#339)
+
+* Don't create a new URL if fragment is unchanged (#292)
+
+* Included in error message the path that produces starting slash forbidden error (#376)
+
+* Skip slow IDNA encoding for ASCII-only strings (#387)
+
+
+1.3.0 (2018-12-11)
+==================
+
+* Fix annotations for ``query`` parameter (#207)
+
+* An incoming query sequence can have int variables (the same as for
+ Mapping type) (#208)
+
+* Add ``URL.explicit_port`` property (#218)
+
+* Give a friendlier error when port can't be converted to int (#168)
+
+* ``bool(URL())`` now returns ``False`` (#272)
+
+1.2.6 (2018-06-14)
+==================
+
+* Drop Python 3.4 trove classifier (#205)
+
+1.2.5 (2018-05-23)
+==================
+
+* Fix annotations for ``build`` (#199)
+
+1.2.4 (2018-05-08)
+==================
+
+* Fix annotations for ``cached_property`` (#195)
+
+1.2.3 (2018-05-03)
+==================
+
+* Accept ``str`` subclasses in ``URL`` constructor (#190)
+
+1.2.2 (2018-05-01)
+==================
+
+* Fix build
+
+1.2.1 (2018-04-30)
+==================
+
+* Pin minimal required Python to 3.5.3 (#189)
+
+1.2.0 (2018-04-30)
+==================
+
+* Forbid inheritance, replace ``__init__`` with ``__new__`` (#171)
+
+* Support PEP-561 (provide type hinting marker) (#182)
+
+1.1.1 (2018-02-17)
+==================
+
+* Fix performance regression: don't encode empty ``netloc`` (#170)
+
+1.1.0 (2018-01-21)
+==================
+
+* Make pure Python quoter consistent with Cython version (#162)
+
+1.0.0 (2018-01-15)
+==================
+
+* Use fast path if quoted string does not need requoting (#154)
+
+* Speed up quoting/unquoting by ``_Quoter`` and ``_Unquoter`` classes (#155)
+
+* Drop ``yarl.quote`` and ``yarl.unquote`` public functions (#155)
+
+* Add custom string writer, reuse static buffer if available (#157)
+ Code is 50-80 times faster than Pure Python version (was 4-5 times faster)
+
+* Don't recode IP zone (#144)
+
+* Support ``encoded=True`` in ``yarl.URL.build()`` (#158)
+
+* Fix updating query with multiple keys (#160)
+
+0.18.0 (2018-01-10)
+===================
+
+* Fallback to IDNA 2003 if domain name is not IDNA 2008 compatible (#152)
+
+0.17.0 (2017-12-30)
+===================
+
+* Use IDNA 2008 for domain name processing (#149)
+
+0.16.0 (2017-12-07)
+===================
+
+* Fix raising ``TypeError`` by ``url.query_string()`` after
+ ``url.with_query({})`` (empty mapping) (#141)
+
+0.15.0 (2017-11-23)
+===================
+
+* Add ``raw_path_qs`` attribute (#137)
+
+0.14.2 (2017-11-14)
+===================
+
+* Restore ``strict`` parameter as no-op in ``quote`` / ``unquote``
+
+0.14.1 (2017-11-13)
+===================
+
+* Restore ``strict`` parameter as no-op for sake of compatibility with
+ aiohttp 2.2
+
+0.14.0 (2017-11-11)
+===================
+
+* Drop strict mode (#123)
+
+* Fix ``"ValueError: Unallowed PCT %"`` when there's a ``"%"`` in the URL (#124)
+
+0.13.0 (2017-10-01)
+===================
+
+* Document ``encoded`` parameter (#102)
+
+* Support relative URLs like ``'?key=value'`` (#100)
+
+* Unsafe encoding for QS fixed. Encode ``;`` character in value parameter (#104)
+
+* Process passwords without user names (#95)
+
+0.12.0 (2017-06-26)
+===================
+
+* Properly support paths without leading slash in ``URL.with_path()`` (#90)
+
+* Enable type annotation checks
+
+0.11.0 (2017-06-26)
+===================
+
+* Normalize path (#86)
+
+* Clear query and fragment parts in ``.with_path()`` (#85)
+
+0.10.3 (2017-06-13)
+===================
+
+* Prevent double URL arguments unquoting (#83)
+
+0.10.2 (2017-05-05)
+===================
+
+* Unexpected hash behavior (#75)
+
+
+0.10.1 (2017-05-03)
+===================
+
+* Unexpected compare behavior (#73)
+
+* Do not quote or unquote + if not a query string. (#74)
+
+
+0.10.0 (2017-03-14)
+===================
+
+* Added ``URL.build`` class method (#58)
+
+* Added ``path_qs`` attribute (#42)
+
+
+0.9.8 (2017-02-16)
+==================
+
+* Do not quote ``:`` in path
+
+
+0.9.7 (2017-02-16)
+==================
+
+* Load from pickle without _cache (#56)
+
+* Percent-encoded pluses in path variables become spaces (#59)
+
+
+0.9.6 (2017-02-15)
+==================
+
+* Revert backward incompatible change (BaseURL)
+
+
+0.9.5 (2017-02-14)
+==================
+
+* Fix BaseURL rich comparison support
+
+
+0.9.4 (2017-02-14)
+==================
+
+* Use BaseURL
+
+
+0.9.3 (2017-02-14)
+==================
+
+* Added BaseURL
+
+
+0.9.2 (2017-02-08)
+==================
+
+* Remove debug print
+
+
+0.9.1 (2017-02-07)
+==================
+
+* Do not lose tail chars (#45)
+
+
+0.9.0 (2017-02-07)
+==================
+
+* Allow to quote ``%`` in non strict mode (#21)
+
+* Incorrect parsing of query parameters with %3B (;) inside (#34)
+
+* Fix core dumps (#41)
+
+* ``tmpbuf`` - compiling error (#43)
+
+* Added ``URL.update_path()`` method
+
+* Added ``URL.update_query()`` method (#47)
+
+
+0.8.1 (2016-12-03)
+==================
+
+* Fix broken aiohttp: revert back ``quote`` / ``unquote``.
+
+
+0.8.0 (2016-12-03)
+==================
+
+* Support more verbose error messages in ``.with_query()`` (#24)
+
+* Don't percent-encode ``@`` and ``:`` in path (#32)
+
+* Don't expose ``yarl.quote`` and ``yarl.unquote``, these functions are
+ part of private API
+
+0.7.1 (2016-11-18)
+==================
+
+* Accept not only ``str`` but all classes inherited from ``str`` also (#25)
+
+0.7.0 (2016-11-07)
+==================
+
+* Accept ``int`` as value for ``.with_query()``
+
+0.6.0 (2016-11-07)
+==================
+
+* Explicitly use UTF8 encoding in ``setup.py`` (#20)
+* Properly unquote non-UTF8 strings (#19)
+
+0.5.3 (2016-11-02)
+==================
+
+* Don't use ``typing.NamedTuple`` fields but indexes on URL construction
+
+0.5.2 (2016-11-02)
+==================
+
+* Inline ``_encode`` class method
+
+0.5.1 (2016-11-02)
+==================
+
+* Make URL construction faster by removing extra classmethod calls
+
+0.5.0 (2016-11-02)
+==================
+
+* Add Cython optimization for quoting/unquoting
+* Provide binary wheels
+
+0.4.3 (2016-09-29)
+==================
+
+* Fix typing stubs
+
+0.4.2 (2016-09-29)
+==================
+
+* Expose ``quote()`` and ``unquote()`` as public API
+
+0.4.1 (2016-09-28)
+==================
+
+* Support empty values in query (``'/path?arg'``)
+
+0.4.0 (2016-09-27)
+==================
+
+* Introduce ``relative()`` (#16)
+
+0.3.2 (2016-09-27)
+==================
+
+* Typo fixes #15
+
+0.3.1 (2016-09-26)
+==================
+
+* Support sequence of pairs as ``with_query()`` parameter
+
+0.3.0 (2016-09-26)
+==================
+
+* Introduce ``is_default_port()``
+
+0.2.1 (2016-09-26)
+==================
+
+* Raise ValueError for URLs like 'http://:8080/'
+
+0.2.0 (2016-09-18)
+==================
+
+* Avoid doubling slashes when joining paths (#13)
+
+* Appending path starting from slash is forbidden (#12)
+
+0.1.4 (2016-09-09)
+==================
+
+* Add ``kwargs`` support for ``with_query()`` (#10)
+
+0.1.3 (2016-09-07)
+==================
+
+* Document ``with_query()``, ``with_fragment()`` and ``origin()``
+
+* Allow ``None`` for ``with_query()`` and ``with_fragment()``
+
+0.1.2 (2016-09-07)
+==================
+
+* Fix links, tune docs theme.
+
+0.1.1 (2016-09-06)
+==================
+
+* Update README, old version used obsolete API
+
+0.1.0 (2016-09-06)
+==================
+
+* The library was deeply refactored, bytes are gone away but all
+ accepted strings are encoded if needed.
+
+0.0.1 (2016-08-30)
+==================
+
+* The first release.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/RECORD" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/RECORD"
new file mode 100644
index 0000000..f87588b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/RECORD"
@@ -0,0 +1,26 @@
+yarl-1.22.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+yarl-1.22.0.dist-info/METADATA,sha256=mGnUBDeH_AIy84ehLyO_STPVAUJky2pUaGVD0FdTq0E,77596
+yarl-1.22.0.dist-info/RECORD,,
+yarl-1.22.0.dist-info/WHEEL,sha256=8UP9x9puWI0P1V_d7K2oMTBqfeLNm21CTzZ_Ptr0NXU,101
+yarl-1.22.0.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+yarl-1.22.0.dist-info/licenses/NOTICE,sha256=VtasbIEFwKUTBMIdsGDjYa-ajqCvmnXCOcKLXRNpODg,609
+yarl-1.22.0.dist-info/top_level.txt,sha256=vf3SJuQh-k7YtvsUrV_OPOrT9Kqn0COlk7IPYyhtGkQ,5
+yarl/__init__.py,sha256=woYZp7KGli7_1P_hR7ZU9ckEj6ho41smyP-PLfEL-lk,281
+yarl/__pycache__/__init__.cpython-312.pyc,,
+yarl/__pycache__/_parse.cpython-312.pyc,,
+yarl/__pycache__/_path.cpython-312.pyc,,
+yarl/__pycache__/_query.cpython-312.pyc,,
+yarl/__pycache__/_quoters.cpython-312.pyc,,
+yarl/__pycache__/_quoting.cpython-312.pyc,,
+yarl/__pycache__/_quoting_py.cpython-312.pyc,,
+yarl/__pycache__/_url.cpython-312.pyc,,
+yarl/_parse.py,sha256=gNt8zxVFGr95ufUQpSMiiZ9vDrvg4zq6MEtT3f6_8J0,7185
+yarl/_path.py,sha256=A0FJUylZyzmlT0a3UDOBbK-EzZXCAYuQQBvG9eAC9hs,1291
+yarl/_query.py,sha256=nwGAYewdOU8nt5YZNZxqQ4BGES82Y3Y6LanxqTjnZxw,4068
+yarl/_quoters.py,sha256=z-BzsXfLnJK-bd-HrGaoKGri9L3GpDv6vxFEtmu-uCM,1154
+yarl/_quoting.py,sha256=yKIqFTzFzWLVb08xy1DSxKNjFwo4f-oLlzxTuKwC57M,506
+yarl/_quoting_c.cp312-win_amd64.pyd,sha256=Hb8YpX-n6oZXABQba_PKdlbRNrfLWwE0wtp3RLS0xlM,87040
+yarl/_quoting_c.pyx,sha256=X40gvQSUB4l7nPKGeiS6pq2JreM36avLhVeBMxd5zmo,14297
+yarl/_quoting_py.py,sha256=7WD7IHhgaJiLZWoIewvB0JRUsbz9McmfZw5TnjlVs9o,6783
+yarl/_url.py,sha256=4K5gCdoQtVi9FmnQdssEqafdlJILKxSap8RNCBC4IGE,55608
+yarl/py.typed,sha256=ay5OMO475PlcZ_Fbun9maHW7Y6MBTk0UXL4ztHx3Iug,14
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/WHEEL" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/WHEEL"
new file mode 100644
index 0000000..10ac2c2
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/WHEEL"
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/LICENSE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/LICENSE"
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/LICENSE"
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/NOTICE" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/NOTICE"
new file mode 100644
index 0000000..fa53b2b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/licenses/NOTICE"
@@ -0,0 +1,13 @@
+ Copyright 2016-2021, Andrew Svetlov and aio-libs team
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/top_level.txt" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/top_level.txt"
new file mode 100644
index 0000000..e93e8bd
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl-1.22.0.dist-info/top_level.txt"
@@ -0,0 +1 @@
+yarl
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/__init__.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/__init__.py"
new file mode 100644
index 0000000..e45554b
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/__init__.py"
@@ -0,0 +1,14 @@
+from ._query import Query, QueryVariable, SimpleQuery
+from ._url import URL, cache_clear, cache_configure, cache_info
+
+__version__ = "1.22.0"
+
+__all__ = (
+ "URL",
+ "SimpleQuery",
+ "QueryVariable",
+ "Query",
+ "cache_clear",
+ "cache_configure",
+ "cache_info",
+)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_parse.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_parse.py"
new file mode 100644
index 0000000..115d772
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_parse.py"
@@ -0,0 +1,203 @@
+"""URL parsing utilities."""
+
+import re
+import unicodedata
+from functools import lru_cache
+from typing import Union
+from urllib.parse import scheme_chars, uses_netloc
+
+from ._quoters import QUOTER, UNQUOTER_PLUS
+
+# Leading and trailing C0 control and space to be stripped per WHATWG spec.
+# == "".join([chr(i) for i in range(0, 0x20 + 1)])
+WHATWG_C0_CONTROL_OR_SPACE = (
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10"
+ "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f "
+)
+
+# Unsafe bytes to be removed per WHATWG spec
+UNSAFE_URL_BYTES_TO_REMOVE = ["\t", "\r", "\n"]
+USES_AUTHORITY = frozenset(uses_netloc)
+
+SplitURLType = tuple[str, str, str, str, str]
+
+
+def split_url(url: str) -> SplitURLType:
+ """Split URL into parts."""
+ # Adapted from urllib.parse.urlsplit
+ # Only lstrip url as some applications rely on preserving trailing space.
+ # (https://url.spec.whatwg.org/#concept-basic-url-parser would strip both)
+ url = url.lstrip(WHATWG_C0_CONTROL_OR_SPACE)
+ for b in UNSAFE_URL_BYTES_TO_REMOVE:
+ if b in url:
+ url = url.replace(b, "")
+
+ scheme = netloc = query = fragment = ""
+ i = url.find(":")
+ if i > 0 and url[0] in scheme_chars:
+ for c in url[1:i]:
+ if c not in scheme_chars:
+ break
+ else:
+ scheme, url = url[:i].lower(), url[i + 1 :]
+ has_hash = "#" in url
+ has_question_mark = "?" in url
+ if url[:2] == "//":
+ delim = len(url) # position of end of domain part of url, default is end
+ if has_hash and has_question_mark:
+ delim_chars = "/?#"
+ elif has_question_mark:
+ delim_chars = "/?"
+ elif has_hash:
+ delim_chars = "/#"
+ else:
+ delim_chars = "/"
+ for c in delim_chars: # look for delimiters; the order is NOT important
+ wdelim = url.find(c, 2) # find first of this delim
+ if wdelim >= 0 and wdelim < delim: # if found
+ delim = wdelim # use earliest delim position
+ netloc = url[2:delim]
+ url = url[delim:]
+ has_left_bracket = "[" in netloc
+ has_right_bracket = "]" in netloc
+ if (has_left_bracket and not has_right_bracket) or (
+ has_right_bracket and not has_left_bracket
+ ):
+ raise ValueError("Invalid IPv6 URL")
+ if has_left_bracket:
+ bracketed_host = netloc.partition("[")[2].partition("]")[0]
+ # Valid bracketed hosts are defined in
+ # https://www.rfc-editor.org/rfc/rfc3986#page-49
+ # https://url.spec.whatwg.org/
+ if bracketed_host and bracketed_host[0] == "v":
+ if not re.match(r"\Av[a-fA-F0-9]+\..+\Z", bracketed_host):
+ raise ValueError("IPvFuture address is invalid")
+ elif ":" not in bracketed_host:
+ raise ValueError("The IPv6 content between brackets is not valid")
+ if has_hash:
+ url, _, fragment = url.partition("#")
+ if has_question_mark:
+ url, _, query = url.partition("?")
+ if netloc and not netloc.isascii():
+ _check_netloc(netloc)
+ return scheme, netloc, url, query, fragment
+
+
+def _check_netloc(netloc: str) -> None:
+ # Adapted from urllib.parse._checknetloc
+ # looking for characters like \u2100 that expand to 'a/c'
+ # IDNA uses NFKC equivalence, so normalize for this check
+
+ # ignore characters already included
+ # but not the surrounding text
+ n = netloc.replace("@", "").replace(":", "").replace("#", "").replace("?", "")
+ normalized_netloc = unicodedata.normalize("NFKC", n)
+ if n == normalized_netloc:
+ return
+ # Note that there are no unicode decompositions for the character '@' so
+ # its currently impossible to have test coverage for this branch, however if the
+ # one should be added in the future we want to make sure its still checked.
+ for c in "/?#@:": # pragma: no branch
+ if c in normalized_netloc:
+ raise ValueError(
+ f"netloc '{netloc}' contains invalid "
+ "characters under NFKC normalization"
+ )
+
+
+@lru_cache # match the same size as urlsplit
+def split_netloc(
+ netloc: str,
+) -> tuple[Union[str, None], Union[str, None], Union[str, None], Union[int, None]]:
+ """Split netloc into username, password, host and port."""
+ if "@" not in netloc:
+ username: Union[str, None] = None
+ password: Union[str, None] = None
+ hostinfo = netloc
+ else:
+ userinfo, _, hostinfo = netloc.rpartition("@")
+ username, have_password, password = userinfo.partition(":")
+ if not have_password:
+ password = None
+
+ if "[" in hostinfo:
+ _, _, bracketed = hostinfo.partition("[")
+ hostname, _, port_str = bracketed.partition("]")
+ _, _, port_str = port_str.partition(":")
+ else:
+ hostname, _, port_str = hostinfo.partition(":")
+
+ if not port_str:
+ return username or None, password, hostname or None, None
+
+ try:
+ port = int(port_str)
+ except ValueError:
+ raise ValueError("Invalid URL: port can't be converted to integer")
+ if not (0 <= port <= 65535):
+ raise ValueError("Port out of range 0-65535")
+ return username or None, password, hostname or None, port
+
+
+def unsplit_result(
+ scheme: str, netloc: str, url: str, query: str, fragment: str
+) -> str:
+ """Unsplit a URL without any normalization."""
+ if netloc or (scheme and scheme in USES_AUTHORITY) or url[:2] == "//":
+ if url and url[:1] != "/":
+ url = f"{scheme}://{netloc}/{url}" if scheme else f"{scheme}:{url}"
+ else:
+ url = f"{scheme}://{netloc}{url}" if scheme else f"//{netloc}{url}"
+ elif scheme:
+ url = f"{scheme}:{url}"
+ if query:
+ url = f"{url}?{query}"
+ return f"{url}#{fragment}" if fragment else url
+
+
+@lru_cache # match the same size as urlsplit
+def make_netloc(
+ user: Union[str, None],
+ password: Union[str, None],
+ host: Union[str, None],
+ port: Union[int, None],
+ encode: bool = False,
+) -> str:
+ """Make netloc from parts.
+
+ The user and password are encoded if encode is True.
+
+ The host must already be encoded with _encode_host.
+ """
+ if host is None:
+ return ""
+ ret = host
+ if port is not None:
+ ret = f"{ret}:{port}"
+ if user is None and password is None:
+ return ret
+ if password is not None:
+ if not user:
+ user = ""
+ elif encode:
+ user = QUOTER(user)
+ if encode:
+ password = QUOTER(password)
+ user = f"{user}:{password}"
+ elif user and encode:
+ user = QUOTER(user)
+ return f"{user}@{ret}" if user else ret
+
+
+def query_to_pairs(query_string: str) -> list[tuple[str, str]]:
+ """Parse a query given as a string argument.
+
+ Works like urllib.parse.parse_qsl with keep empty values.
+ """
+ pairs: list[tuple[str, str]] = []
+ if not query_string:
+ return pairs
+ for k_v in query_string.split("&"):
+ k, _, v = k_v.partition("=")
+ pairs.append((UNQUOTER_PLUS(k), UNQUOTER_PLUS(v)))
+ return pairs
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_path.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_path.py"
new file mode 100644
index 0000000..c22f0b4
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_path.py"
@@ -0,0 +1,41 @@
+"""Utilities for working with paths."""
+
+from collections.abc import Sequence
+from contextlib import suppress
+
+
+def normalize_path_segments(segments: Sequence[str]) -> list[str]:
+ """Drop '.' and '..' from a sequence of str segments"""
+
+ resolved_path: list[str] = []
+
+ for seg in segments:
+ if seg == "..":
+ # ignore any .. segments that would otherwise cause an
+ # IndexError when popped from resolved_path if
+ # resolving for rfc3986
+ with suppress(IndexError):
+ resolved_path.pop()
+ elif seg != ".":
+ resolved_path.append(seg)
+
+ if segments and segments[-1] in (".", ".."):
+ # do some post-processing here.
+ # if the last segment was a relative dir,
+ # then we need to append the trailing '/'
+ resolved_path.append("")
+
+ return resolved_path
+
+
+def normalize_path(path: str) -> str:
+ # Drop '.' and '..' from str path
+ prefix = ""
+ if path and path[0] == "/":
+ # preserve the "/" root element of absolute paths, copying it to the
+ # normalised output as per sections 5.2.4 and 6.2.2.3 of rfc3986.
+ prefix = "/"
+ path = path[1:]
+
+ segments = path.split("/")
+ return prefix + "/".join(normalize_path_segments(segments))
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_query.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_query.py"
new file mode 100644
index 0000000..d911bcf
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_query.py"
@@ -0,0 +1,121 @@
+"""Query string handling."""
+
+import math
+from collections.abc import Iterable, Mapping, Sequence
+from typing import TYPE_CHECKING, Any, SupportsInt, Union, cast
+
+from multidict import istr
+
+from ._quoters import QUERY_PART_QUOTER, QUERY_QUOTER
+
+SimpleQuery = Union[str, SupportsInt, float]
+QueryVariable = Union[SimpleQuery, Sequence[SimpleQuery]]
+Query = Union[
+ None, str, Mapping[str, QueryVariable], Sequence[tuple[str, QueryVariable]]
+]
+
+
+def query_var(v: SimpleQuery) -> str:
+ """Convert a query variable to a string."""
+ cls = type(v)
+ if cls is int: # Fast path for non-subclassed int
+ return str(v)
+ if isinstance(v, str):
+ return v
+ if isinstance(v, float):
+ if math.isinf(v):
+ raise ValueError("float('inf') is not supported")
+ if math.isnan(v):
+ raise ValueError("float('nan') is not supported")
+ return str(float(v))
+ if cls is not bool and isinstance(v, SupportsInt):
+ return str(int(v))
+ raise TypeError(
+ "Invalid variable type: value "
+ "should be str, int or float, got {!r} "
+ "of type {}".format(v, cls)
+ )
+
+
+def get_str_query_from_sequence_iterable(
+ items: Iterable[tuple[Union[str, istr], QueryVariable]],
+) -> str:
+ """Return a query string from a sequence of (key, value) pairs.
+
+ value is a single value or a sequence of values for the key
+
+ The sequence of values must be a list or tuple.
+ """
+ quoter = QUERY_PART_QUOTER
+ pairs = [
+ f"{quoter(k)}={quoter(v if type(v) is str else query_var(v))}"
+ for k, val in items
+ for v in (
+ val if type(val) is not str and isinstance(val, (list, tuple)) else (val,)
+ )
+ ]
+ return "&".join(pairs)
+
+
+def get_str_query_from_iterable(
+ items: Iterable[tuple[Union[str, istr], SimpleQuery]],
+) -> str:
+ """Return a query string from an iterable.
+
+ The iterable must contain (key, value) pairs.
+
+ The values are not allowed to be sequences, only single values are
+ allowed. For sequences, use `_get_str_query_from_sequence_iterable`.
+ """
+ quoter = QUERY_PART_QUOTER
+ # A listcomp is used since listcomps are inlined on CPython 3.12+ and
+ # they are a bit faster than a generator expression.
+ pairs = [
+ f"{quoter(k)}={quoter(v if type(v) is str else query_var(v))}" for k, v in items
+ ]
+ return "&".join(pairs)
+
+
+def get_str_query(*args: Any, **kwargs: Any) -> Union[str, None]:
+ """Return a query string from supported args."""
+ query: Union[
+ str,
+ Mapping[str, QueryVariable],
+ Sequence[tuple[Union[str, istr], SimpleQuery]],
+ None,
+ ]
+ if kwargs:
+ if args:
+ msg = "Either kwargs or single query parameter must be present"
+ raise ValueError(msg)
+ query = kwargs
+ elif len(args) == 1:
+ query = args[0]
+ else:
+ raise ValueError("Either kwargs or single query parameter must be present")
+
+ if query is None:
+ return None
+ if not query:
+ return ""
+ if type(query) is dict:
+ return get_str_query_from_sequence_iterable(query.items())
+ if type(query) is str or isinstance(query, str):
+ return QUERY_QUOTER(query)
+ if isinstance(query, Mapping):
+ return get_str_query_from_sequence_iterable(query.items())
+ if isinstance(query, (bytes, bytearray, memoryview)):
+ msg = "Invalid query type: bytes, bytearray and memoryview are forbidden"
+ raise TypeError(msg)
+ if isinstance(query, Sequence):
+ # We don't expect sequence values if we're given a list of pairs
+ # already; only mappings like builtin `dict` which can't have the
+ # same key pointing to multiple values are allowed to use
+ # `_query_seq_pairs`.
+ if TYPE_CHECKING:
+ query = cast(Sequence[tuple[Union[str, istr], SimpleQuery]], query)
+ return get_str_query_from_iterable(query)
+ raise TypeError(
+ "Invalid query type: only str, mapping or "
+ "sequence of (key, value) pairs is allowed"
+ )
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoters.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoters.py"
new file mode 100644
index 0000000..0feb5b1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoters.py"
@@ -0,0 +1,33 @@
+"""Quoting and unquoting utilities for URL parts."""
+
+from typing import Union
+from urllib.parse import quote
+
+from ._quoting import _Quoter, _Unquoter
+
+QUOTER = _Quoter(requote=False)
+REQUOTER = _Quoter()
+PATH_QUOTER = _Quoter(safe="@:", protected="/+", requote=False)
+PATH_REQUOTER = _Quoter(safe="@:", protected="/+")
+QUERY_QUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True, requote=False)
+QUERY_REQUOTER = _Quoter(safe="?/:@", protected="=+&;", qs=True)
+QUERY_PART_QUOTER = _Quoter(safe="?/:@", qs=True, requote=False)
+FRAGMENT_QUOTER = _Quoter(safe="?/:@", requote=False)
+FRAGMENT_REQUOTER = _Quoter(safe="?/:@")
+
+UNQUOTER = _Unquoter()
+PATH_UNQUOTER = _Unquoter(unsafe="+")
+PATH_SAFE_UNQUOTER = _Unquoter(ignore="/%", unsafe="+")
+QS_UNQUOTER = _Unquoter(qs=True)
+UNQUOTER_PLUS = _Unquoter(plus=True) # to match urllib.parse.unquote_plus
+
+
+def human_quote(s: Union[str, None], unsafe: str) -> Union[str, None]:
+ if not s:
+ return s
+ for c in "%" + unsafe:
+ if c in s:
+ s = s.replace(c, f"%{ord(c):02X}")
+ if s.isprintable():
+ return s
+ return "".join(c if c.isprintable() else quote(c) for c in s)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting.py"
new file mode 100644
index 0000000..25d76c8
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting.py"
@@ -0,0 +1,19 @@
+import os
+import sys
+from typing import TYPE_CHECKING
+
+__all__ = ("_Quoter", "_Unquoter")
+
+
+NO_EXTENSIONS = bool(os.environ.get("YARL_NO_EXTENSIONS")) # type: bool
+if sys.implementation.name != "cpython":
+ NO_EXTENSIONS = True
+
+
+if TYPE_CHECKING or NO_EXTENSIONS:
+ from ._quoting_py import _Quoter, _Unquoter
+else:
+ try:
+ from ._quoting_c import _Quoter, _Unquoter
+ except ImportError: # pragma: no cover
+ from ._quoting_py import _Quoter, _Unquoter # type: ignore[assignment]
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.cp312-win_amd64.pyd" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.cp312-win_amd64.pyd"
new file mode 100644
index 0000000..228860c
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.cp312-win_amd64.pyd"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.pyx" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.pyx"
new file mode 100644
index 0000000..dacf6b0
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_c.pyx"
@@ -0,0 +1,451 @@
+from cpython.exc cimport PyErr_NoMemory
+from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
+from cpython.unicode cimport (
+ PyUnicode_DATA,
+ PyUnicode_DecodeASCII,
+ PyUnicode_DecodeUTF8Stateful,
+ PyUnicode_GET_LENGTH,
+ PyUnicode_KIND,
+ PyUnicode_READ,
+)
+from libc.stdint cimport uint8_t, uint64_t
+from libc.string cimport memcpy, memset
+
+from string import ascii_letters, digits
+
+
+cdef str GEN_DELIMS = ":/?#[]@"
+cdef str SUB_DELIMS_WITHOUT_QS = "!$'()*,"
+cdef str SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + '+?=;'
+cdef str RESERVED = GEN_DELIMS + SUB_DELIMS
+cdef str UNRESERVED = ascii_letters + digits + '-._~'
+cdef str ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS
+cdef str QS = '+&=;'
+
+DEF BUF_SIZE = 8 * 1024 # 8KiB
+
+cdef inline Py_UCS4 _to_hex(uint8_t v) noexcept:
+ if v < 10:
+ return <Py_UCS4>(v+0x30) # ord('0') == 0x30
+ else:
+ return <Py_UCS4>(v+0x41-10) # ord('A') == 0x41
+
+
+cdef inline int _from_hex(Py_UCS4 v) noexcept:
+ if '0' <= v <= '9':
+ return <int>(v) - 0x30 # ord('0') == 0x30
+ elif 'A' <= v <= 'F':
+ return <int>(v) - 0x41 + 10 # ord('A') == 0x41
+ elif 'a' <= v <= 'f':
+ return <int>(v) - 0x61 + 10 # ord('a') == 0x61
+ else:
+ return -1
+
+
+cdef inline int _is_lower_hex(Py_UCS4 v) noexcept:
+ return 'a' <= v <= 'f'
+
+
+cdef inline long _restore_ch(Py_UCS4 d1, Py_UCS4 d2):
+ cdef int digit1 = _from_hex(d1)
+ if digit1 < 0:
+ return -1
+ cdef int digit2 = _from_hex(d2)
+ if digit2 < 0:
+ return -1
+ return digit1 << 4 | digit2
+
+
+cdef uint8_t ALLOWED_TABLE[16]
+cdef uint8_t ALLOWED_NOTQS_TABLE[16]
+
+
+cdef inline bint bit_at(uint8_t array[], uint64_t ch) noexcept:
+ return array[ch >> 3] & (1 << (ch & 7))
+
+
+cdef inline void set_bit(uint8_t array[], uint64_t ch) noexcept:
+ array[ch >> 3] |= (1 << (ch & 7))
+
+
+memset(ALLOWED_TABLE, 0, sizeof(ALLOWED_TABLE))
+memset(ALLOWED_NOTQS_TABLE, 0, sizeof(ALLOWED_NOTQS_TABLE))
+
+for i in range(128):
+ if chr(i) in ALLOWED:
+ set_bit(ALLOWED_TABLE, i)
+ set_bit(ALLOWED_NOTQS_TABLE, i)
+ if chr(i) in QS:
+ set_bit(ALLOWED_NOTQS_TABLE, i)
+
+# ----------------- writer ---------------------------
+
+cdef struct Writer:
+ char *buf
+ bint heap_allocated_buf
+ Py_ssize_t size
+ Py_ssize_t pos
+ bint changed
+
+
+cdef inline void _init_writer(Writer* writer, char* buf):
+ writer.buf = buf
+ writer.heap_allocated_buf = False
+ writer.size = BUF_SIZE
+ writer.pos = 0
+ writer.changed = 0
+
+
+cdef inline void _release_writer(Writer* writer):
+ if writer.heap_allocated_buf:
+ PyMem_Free(writer.buf)
+
+
+cdef inline int _write_char(Writer* writer, Py_UCS4 ch, bint changed):
+ cdef char * buf
+ cdef Py_ssize_t size
+
+ if writer.pos == writer.size:
+ # reallocate
+ size = writer.size + BUF_SIZE
+ if not writer.heap_allocated_buf:
+ buf = <char*>PyMem_Malloc(size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ memcpy(buf, writer.buf, writer.size)
+ writer.heap_allocated_buf = True
+ else:
+ buf = <char*>PyMem_Realloc(writer.buf, size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ writer.buf = buf
+ writer.size = size
+ writer.buf[writer.pos] = <char>ch
+ writer.pos += 1
+ writer.changed |= changed
+ return 0
+
+
+cdef inline int _write_pct(Writer* writer, uint8_t ch, bint changed):
+ if _write_char(writer, '%', changed) < 0:
+ return -1
+ if _write_char(writer, _to_hex(<uint8_t>ch >> 4), changed) < 0:
+ return -1
+ return _write_char(writer, _to_hex(<uint8_t>ch & 0x0f), changed)
+
+
+cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
+ cdef uint64_t utf = <uint64_t> symbol
+
+ if utf < 0x80:
+ return _write_pct(writer, <uint8_t>utf, True)
+ elif utf < 0x800:
+ if _write_pct(writer, <uint8_t>(0xc0 | (utf >> 6)), True) < 0:
+ return -1
+ return _write_pct(writer, <uint8_t>(0x80 | (utf & 0x3f)), True)
+ elif 0xD800 <= utf <= 0xDFFF:
+ # surogate pair, ignored
+ return 0
+ elif utf < 0x10000:
+ if _write_pct(writer, <uint8_t>(0xe0 | (utf >> 12)), True) < 0:
+ return -1
+ if _write_pct(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f)),
+ True) < 0:
+ return -1
+ return _write_pct(writer, <uint8_t>(0x80 | (utf & 0x3f)), True)
+ elif utf > 0x10FFFF:
+ # symbol is too large
+ return 0
+ else:
+ if _write_pct(writer, <uint8_t>(0xf0 | (utf >> 18)), True) < 0:
+ return -1
+ if _write_pct(writer, <uint8_t>(0x80 | ((utf >> 12) & 0x3f)),
+ True) < 0:
+ return -1
+ if _write_pct(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f)),
+ True) < 0:
+ return -1
+ return _write_pct(writer, <uint8_t>(0x80 | (utf & 0x3f)), True)
+
+
+# --------------------- end writer --------------------------
+
+
+cdef class _Quoter:
+ cdef bint _qs
+ cdef bint _requote
+
+ cdef uint8_t _safe_table[16]
+ cdef uint8_t _protected_table[16]
+
+ def __init__(
+ self, *, str safe='', str protected='', bint qs=False, bint requote=True,
+ ):
+ cdef Py_UCS4 ch
+
+ self._qs = qs
+ self._requote = requote
+
+ if not self._qs:
+ memcpy(self._safe_table,
+ ALLOWED_NOTQS_TABLE,
+ sizeof(self._safe_table))
+ else:
+ memcpy(self._safe_table,
+ ALLOWED_TABLE,
+ sizeof(self._safe_table))
+ for ch in safe:
+ if ord(ch) > 127:
+ raise ValueError("Only safe symbols with ORD < 128 are allowed")
+ set_bit(self._safe_table, ch)
+
+ memset(self._protected_table, 0, sizeof(self._protected_table))
+ for ch in protected:
+ if ord(ch) > 127:
+ raise ValueError("Only safe symbols with ORD < 128 are allowed")
+ set_bit(self._safe_table, ch)
+ set_bit(self._protected_table, ch)
+
+ def __call__(self, val):
+ if val is None:
+ return None
+ if type(val) is not str:
+ if isinstance(val, str):
+ # derived from str
+ val = str(val)
+ else:
+ raise TypeError("Argument should be str")
+ return self._do_quote_or_skip(<str>val)
+
+ cdef str _do_quote_or_skip(self, str val):
+ cdef char[BUF_SIZE] buffer
+ cdef Py_UCS4 ch
+ cdef Py_ssize_t length = PyUnicode_GET_LENGTH(val)
+ cdef Py_ssize_t idx = length
+ cdef bint must_quote = 0
+ cdef Writer writer
+ cdef int kind = PyUnicode_KIND(val)
+ cdef const void *data = PyUnicode_DATA(val)
+
+ # If everything in the string is in the safe
+ # table and all ASCII, we can skip quoting
+ while idx:
+ idx -= 1
+ ch = PyUnicode_READ(kind, data, idx)
+ if ch >= 128 or not bit_at(self._safe_table, ch):
+ must_quote = 1
+ break
+
+ if not must_quote:
+ return val
+
+ _init_writer(&writer, &buffer[0])
+ try:
+ return self._do_quote(<str>val, length, kind, data, &writer)
+ finally:
+ _release_writer(&writer)
+
+ cdef str _do_quote(
+ self,
+ str val,
+ Py_ssize_t length,
+ int kind,
+ const void *data,
+ Writer *writer
+ ):
+ cdef Py_UCS4 ch
+ cdef long chl
+ cdef int changed
+ cdef Py_ssize_t idx = 0
+
+ while idx < length:
+ ch = PyUnicode_READ(kind, data, idx)
+ idx += 1
+ if ch == '%' and self._requote and idx <= length - 2:
+ chl = _restore_ch(
+ PyUnicode_READ(kind, data, idx),
+ PyUnicode_READ(kind, data, idx + 1)
+ )
+ if chl != -1:
+ ch = <Py_UCS4>chl
+ idx += 2
+ if ch < 128:
+ if bit_at(self._protected_table, ch):
+ if _write_pct(writer, ch, True) < 0:
+ raise
+ continue
+
+ if bit_at(self._safe_table, ch):
+ if _write_char(writer, ch, True) < 0:
+ raise
+ continue
+
+ changed = (_is_lower_hex(PyUnicode_READ(kind, data, idx - 2)) or
+ _is_lower_hex(PyUnicode_READ(kind, data, idx - 1)))
+ if _write_pct(writer, ch, changed) < 0:
+ raise
+ continue
+ else:
+ ch = '%'
+
+ if self._write(writer, ch) < 0:
+ raise
+
+ if not writer.changed:
+ return val
+ else:
+ return PyUnicode_DecodeASCII(writer.buf, writer.pos, "strict")
+
+ cdef inline int _write(self, Writer *writer, Py_UCS4 ch):
+ if self._qs:
+ if ch == ' ':
+ return _write_char(writer, '+', True)
+
+ if ch < 128 and bit_at(self._safe_table, ch):
+ return _write_char(writer, ch, False)
+
+ return _write_utf8(writer, ch)
+
+
+cdef class _Unquoter:
+ cdef str _ignore
+ cdef bint _has_ignore
+ cdef str _unsafe
+ cdef bytes _unsafe_bytes
+ cdef Py_ssize_t _unsafe_bytes_len
+ cdef const unsigned char * _unsafe_bytes_char
+ cdef bint _qs
+ cdef bint _plus # to match urllib.parse.unquote_plus
+ cdef _Quoter _quoter
+ cdef _Quoter _qs_quoter
+
+ def __init__(self, *, ignore="", unsafe="", qs=False, plus=False):
+ self._ignore = ignore
+ self._has_ignore = bool(self._ignore)
+ self._unsafe = unsafe
+ # unsafe may only be extended ascii characters (0-255)
+ self._unsafe_bytes = self._unsafe.encode('ascii')
+ self._unsafe_bytes_len = len(self._unsafe_bytes)
+ self._unsafe_bytes_char = self._unsafe_bytes
+ self._qs = qs
+ self._plus = plus
+ self._quoter = _Quoter()
+ self._qs_quoter = _Quoter(qs=True)
+
+ def __call__(self, val):
+ if val is None:
+ return None
+ if type(val) is not str:
+ if isinstance(val, str):
+ # derived from str
+ val = str(val)
+ else:
+ raise TypeError("Argument should be str")
+ return self._do_unquote(<str>val)
+
+ cdef str _do_unquote(self, str val):
+ cdef Py_ssize_t length = PyUnicode_GET_LENGTH(val)
+ if length == 0:
+ return val
+
+ cdef list ret = []
+ cdef char buffer[4]
+ cdef Py_ssize_t buflen = 0
+ cdef Py_ssize_t consumed
+ cdef str unquoted
+ cdef Py_UCS4 ch = 0
+ cdef long chl = 0
+ cdef Py_ssize_t idx = 0
+ cdef Py_ssize_t start_pct
+ cdef int kind = PyUnicode_KIND(val)
+ cdef const void *data = PyUnicode_DATA(val)
+ cdef bint changed = 0
+ while idx < length:
+ ch = PyUnicode_READ(kind, data, idx)
+ idx += 1
+ if ch == '%' and idx <= length - 2:
+ changed = 1
+ chl = _restore_ch(
+ PyUnicode_READ(kind, data, idx),
+ PyUnicode_READ(kind, data, idx + 1)
+ )
+ if chl != -1:
+ ch = <Py_UCS4>chl
+ idx += 2
+ assert buflen < 4
+ buffer[buflen] = ch
+ buflen += 1
+ try:
+ unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen,
+ NULL, &consumed)
+ except UnicodeDecodeError:
+ start_pct = idx - buflen * 3
+ buffer[0] = ch
+ buflen = 1
+ ret.append(val[start_pct : idx - 3])
+ try:
+ unquoted = PyUnicode_DecodeUTF8Stateful(buffer, buflen,
+ NULL, &consumed)
+ except UnicodeDecodeError:
+ buflen = 0
+ ret.append(val[idx - 3 : idx])
+ continue
+ if not unquoted:
+ assert consumed == 0
+ continue
+ assert consumed == buflen
+ buflen = 0
+ if self._qs and unquoted in '+=&;':
+ ret.append(self._qs_quoter(unquoted))
+ elif (
+ (self._unsafe_bytes_len and unquoted in self._unsafe) or
+ (self._has_ignore and unquoted in self._ignore)
+ ):
+ ret.append(self._quoter(unquoted))
+ else:
+ ret.append(unquoted)
+ continue
+ else:
+ ch = '%'
+
+ if buflen:
+ start_pct = idx - 1 - buflen * 3
+ ret.append(val[start_pct : idx - 1])
+ buflen = 0
+
+ if ch == '+':
+ if (
+ (not self._qs and not self._plus) or
+ (self._unsafe_bytes_len and self._is_char_unsafe(ch))
+ ):
+ ret.append('+')
+ else:
+ changed = 1
+ ret.append(' ')
+ continue
+
+ if self._unsafe_bytes_len and self._is_char_unsafe(ch):
+ changed = 1
+ ret.append('%')
+ h = hex(ord(ch)).upper()[2:]
+ for ch in h:
+ ret.append(ch)
+ continue
+
+ ret.append(ch)
+
+ if not changed:
+ return val
+
+ if buflen:
+ ret.append(val[length - buflen * 3 : length])
+
+ return ''.join(ret)
+
+ cdef inline bint _is_char_unsafe(self, Py_UCS4 ch):
+ for i in range(self._unsafe_bytes_len):
+ if ch == self._unsafe_bytes_char[i]:
+ return True
+ return False
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_py.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_py.py"
new file mode 100644
index 0000000..80bf07f
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_quoting_py.py"
@@ -0,0 +1,213 @@
+import codecs
+import re
+from string import ascii_letters, ascii_lowercase, digits
+from typing import Union, overload
+
+BASCII_LOWERCASE = ascii_lowercase.encode("ascii")
+BPCT_ALLOWED = {f"%{i:02X}".encode("ascii") for i in range(256)}
+GEN_DELIMS = ":/?#[]@"
+SUB_DELIMS_WITHOUT_QS = "!$'()*,"
+SUB_DELIMS = SUB_DELIMS_WITHOUT_QS + "+&=;"
+RESERVED = GEN_DELIMS + SUB_DELIMS
+UNRESERVED = ascii_letters + digits + "-._~"
+ALLOWED = UNRESERVED + SUB_DELIMS_WITHOUT_QS
+
+
+_IS_HEX = re.compile(b"[A-Z0-9][A-Z0-9]")
+_IS_HEX_STR = re.compile("[A-Fa-f0-9][A-Fa-f0-9]")
+
+utf8_decoder = codecs.getincrementaldecoder("utf-8")
+
+
+class _Quoter:
+ def __init__(
+ self,
+ *,
+ safe: str = "",
+ protected: str = "",
+ qs: bool = False,
+ requote: bool = True,
+ ) -> None:
+ self._safe = safe
+ self._protected = protected
+ self._qs = qs
+ self._requote = requote
+
+ @overload
+ def __call__(self, val: str) -> str: ...
+ @overload
+ def __call__(self, val: None) -> None: ...
+ def __call__(self, val: Union[str, None]) -> Union[str, None]:
+ if val is None:
+ return None
+ if not isinstance(val, str):
+ raise TypeError("Argument should be str")
+ if not val:
+ return ""
+ bval = val.encode("utf8", errors="ignore")
+ ret = bytearray()
+ pct = bytearray()
+ safe = self._safe
+ safe += ALLOWED
+ if not self._qs:
+ safe += "+&=;"
+ safe += self._protected
+ bsafe = safe.encode("ascii")
+ idx = 0
+ while idx < len(bval):
+ ch = bval[idx]
+ idx += 1
+
+ if pct:
+ if ch in BASCII_LOWERCASE:
+ ch = ch - 32 # convert to uppercase
+ pct.append(ch)
+ if len(pct) == 3: # pragma: no branch # peephole optimizer
+ buf = pct[1:]
+ if not _IS_HEX.match(buf):
+ ret.extend(b"%25")
+ pct.clear()
+ idx -= 2
+ continue
+ try:
+ unquoted = chr(int(pct[1:].decode("ascii"), base=16))
+ except ValueError:
+ ret.extend(b"%25")
+ pct.clear()
+ idx -= 2
+ continue
+
+ if unquoted in self._protected:
+ ret.extend(pct)
+ elif unquoted in safe:
+ ret.append(ord(unquoted))
+ else:
+ ret.extend(pct)
+ pct.clear()
+
+ # special case, if we have only one char after "%"
+ elif len(pct) == 2 and idx == len(bval):
+ ret.extend(b"%25")
+ pct.clear()
+ idx -= 1
+
+ continue
+
+ elif ch == ord("%") and self._requote:
+ pct.clear()
+ pct.append(ch)
+
+ # special case if "%" is last char
+ if idx == len(bval):
+ ret.extend(b"%25")
+
+ continue
+
+ if self._qs and ch == ord(" "):
+ ret.append(ord("+"))
+ continue
+ if ch in bsafe:
+ ret.append(ch)
+ continue
+
+ ret.extend((f"%{ch:02X}").encode("ascii"))
+
+ ret2 = ret.decode("ascii")
+ if ret2 == val:
+ return val
+ return ret2
+
+
+class _Unquoter:
+ def __init__(
+ self,
+ *,
+ ignore: str = "",
+ unsafe: str = "",
+ qs: bool = False,
+ plus: bool = False,
+ ) -> None:
+ self._ignore = ignore
+ self._unsafe = unsafe
+ self._qs = qs
+ self._plus = plus # to match urllib.parse.unquote_plus
+ self._quoter = _Quoter()
+ self._qs_quoter = _Quoter(qs=True)
+
+ @overload
+ def __call__(self, val: str) -> str: ...
+ @overload
+ def __call__(self, val: None) -> None: ...
+ def __call__(self, val: Union[str, None]) -> Union[str, None]:
+ if val is None:
+ return None
+ if not isinstance(val, str):
+ raise TypeError("Argument should be str")
+ if not val:
+ return ""
+ decoder = utf8_decoder()
+ ret = []
+ idx = 0
+ while idx < len(val):
+ ch = val[idx]
+ idx += 1
+ if ch == "%" and idx <= len(val) - 2:
+ pct = val[idx : idx + 2]
+ if _IS_HEX_STR.fullmatch(pct):
+ b = bytes([int(pct, base=16)])
+ idx += 2
+ try:
+ unquoted = decoder.decode(b)
+ except UnicodeDecodeError:
+ start_pct = idx - 3 - len(decoder.buffer) * 3
+ ret.append(val[start_pct : idx - 3])
+ decoder.reset()
+ try:
+ unquoted = decoder.decode(b)
+ except UnicodeDecodeError:
+ ret.append(val[idx - 3 : idx])
+ continue
+ if not unquoted:
+ continue
+ if self._qs and unquoted in "+=&;":
+ to_add = self._qs_quoter(unquoted)
+ if to_add is None: # pragma: no cover
+ raise RuntimeError("Cannot quote None")
+ ret.append(to_add)
+ elif unquoted in self._unsafe or unquoted in self._ignore:
+ to_add = self._quoter(unquoted)
+ if to_add is None: # pragma: no cover
+ raise RuntimeError("Cannot quote None")
+ ret.append(to_add)
+ else:
+ ret.append(unquoted)
+ continue
+
+ if decoder.buffer:
+ start_pct = idx - 1 - len(decoder.buffer) * 3
+ ret.append(val[start_pct : idx - 1])
+ decoder.reset()
+
+ if ch == "+":
+ if (not self._qs and not self._plus) or ch in self._unsafe:
+ ret.append("+")
+ else:
+ ret.append(" ")
+ continue
+
+ if ch in self._unsafe:
+ ret.append("%")
+ h = hex(ord(ch)).upper()[2:]
+ for ch in h:
+ ret.append(ch)
+ continue
+
+ ret.append(ch)
+
+ if decoder.buffer:
+ ret.append(val[-len(decoder.buffer) * 3 :])
+
+ ret2 = "".join(ret)
+ if ret2 == val:
+ return val
+ return ret2
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_url.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_url.py"
new file mode 100644
index 0000000..527a576
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/_url.py"
@@ -0,0 +1,1622 @@
+import re
+import sys
+import warnings
+from collections.abc import Mapping, Sequence
+from enum import Enum
+from functools import _CacheInfo, lru_cache
+from ipaddress import ip_address
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ NoReturn,
+ TypedDict,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+from urllib.parse import SplitResult, uses_relative
+
+import idna
+from multidict import MultiDict, MultiDictProxy, istr
+from propcache.api import under_cached_property as cached_property
+
+from ._parse import (
+ USES_AUTHORITY,
+ SplitURLType,
+ make_netloc,
+ query_to_pairs,
+ split_netloc,
+ split_url,
+ unsplit_result,
+)
+from ._path import normalize_path, normalize_path_segments
+from ._query import (
+ Query,
+ QueryVariable,
+ SimpleQuery,
+ get_str_query,
+ get_str_query_from_iterable,
+ get_str_query_from_sequence_iterable,
+)
+from ._quoters import (
+ FRAGMENT_QUOTER,
+ FRAGMENT_REQUOTER,
+ PATH_QUOTER,
+ PATH_REQUOTER,
+ PATH_SAFE_UNQUOTER,
+ PATH_UNQUOTER,
+ QS_UNQUOTER,
+ QUERY_QUOTER,
+ QUERY_REQUOTER,
+ QUOTER,
+ REQUOTER,
+ UNQUOTER,
+ human_quote,
+)
+
+DEFAULT_PORTS = {"http": 80, "https": 443, "ws": 80, "wss": 443, "ftp": 21}
+USES_RELATIVE = frozenset(uses_relative)
+
+# Special schemes https://url.spec.whatwg.org/#special-scheme
+# are not allowed to have an empty host https://url.spec.whatwg.org/#url-representation
+SCHEME_REQUIRES_HOST = frozenset(("http", "https", "ws", "wss", "ftp"))
+
+
+# reg-name: unreserved / pct-encoded / sub-delims
+# this pattern matches anything that is *not* in those classes. and is only used
+# on lower-cased ASCII values.
+NOT_REG_NAME = re.compile(
+ r"""
+ # any character not in the unreserved or sub-delims sets, plus %
+ # (validated with the additional check for pct-encoded sequences below)
+ [^a-z0-9\-._~!$&'()*+,;=%]
+ |
+ # % only allowed if it is part of a pct-encoded
+ # sequence of 2 hex digits.
+ %(?![0-9a-f]{2})
+ """,
+ re.VERBOSE,
+)
+
+_T = TypeVar("_T")
+
+if sys.version_info >= (3, 11):
+ from typing import Self
+else:
+ Self = Any
+
+
+class UndefinedType(Enum):
+ """Singleton type for use with not set sentinel values."""
+
+ _singleton = 0
+
+
+UNDEFINED = UndefinedType._singleton
+
+
+class CacheInfo(TypedDict):
+ """Host encoding cache."""
+
+ idna_encode: _CacheInfo
+ idna_decode: _CacheInfo
+ ip_address: _CacheInfo
+ host_validate: _CacheInfo
+ encode_host: _CacheInfo
+
+
+class _InternalURLCache(TypedDict, total=False):
+ _val: SplitURLType
+ _origin: "URL"
+ absolute: bool
+ hash: int
+ scheme: str
+ raw_authority: str
+ authority: str
+ raw_user: Union[str, None]
+ user: Union[str, None]
+ raw_password: Union[str, None]
+ password: Union[str, None]
+ raw_host: Union[str, None]
+ host: Union[str, None]
+ host_subcomponent: Union[str, None]
+ host_port_subcomponent: Union[str, None]
+ port: Union[int, None]
+ explicit_port: Union[int, None]
+ raw_path: str
+ path: str
+ _parsed_query: list[tuple[str, str]]
+ query: "MultiDictProxy[str]"
+ raw_query_string: str
+ query_string: str
+ path_qs: str
+ raw_path_qs: str
+ raw_fragment: str
+ fragment: str
+ raw_parts: tuple[str, ...]
+ parts: tuple[str, ...]
+ parent: "URL"
+ raw_name: str
+ name: str
+ raw_suffix: str
+ suffix: str
+ raw_suffixes: tuple[str, ...]
+ suffixes: tuple[str, ...]
+
+
+def rewrite_module(obj: _T) -> _T:
+ obj.__module__ = "yarl"
+ return obj
+
+
+@lru_cache
+def encode_url(url_str: str) -> "URL":
+ """Parse unencoded URL."""
+ cache: _InternalURLCache = {}
+ host: Union[str, None]
+ scheme, netloc, path, query, fragment = split_url(url_str)
+ if not netloc: # netloc
+ host = ""
+ else:
+ if ":" in netloc or "@" in netloc or "[" in netloc:
+ # Complex netloc
+ username, password, host, port = split_netloc(netloc)
+ else:
+ username = password = port = None
+ host = netloc
+ if host is None:
+ if scheme in SCHEME_REQUIRES_HOST:
+ msg = (
+ "Invalid URL: host is required for "
+ f"absolute urls with the {scheme} scheme"
+ )
+ raise ValueError(msg)
+ else:
+ host = ""
+ host = _encode_host(host, validate_host=False)
+ # Remove brackets as host encoder adds back brackets for IPv6 addresses
+ cache["raw_host"] = host[1:-1] if "[" in host else host
+ cache["explicit_port"] = port
+ if password is None and username is None:
+ # Fast path for URLs without user, password
+ netloc = host if port is None else f"{host}:{port}"
+ cache["raw_user"] = None
+ cache["raw_password"] = None
+ else:
+ raw_user = REQUOTER(username) if username else username
+ raw_password = REQUOTER(password) if password else password
+ netloc = make_netloc(raw_user, raw_password, host, port)
+ cache["raw_user"] = raw_user
+ cache["raw_password"] = raw_password
+
+ if path:
+ path = PATH_REQUOTER(path)
+ if netloc and "." in path:
+ path = normalize_path(path)
+ if query:
+ query = QUERY_REQUOTER(query)
+ if fragment:
+ fragment = FRAGMENT_REQUOTER(fragment)
+
+ cache["scheme"] = scheme
+ cache["raw_path"] = "/" if not path and netloc else path
+ cache["raw_query_string"] = query
+ cache["raw_fragment"] = fragment
+
+ self = object.__new__(URL)
+ self._scheme = scheme
+ self._netloc = netloc
+ self._path = path
+ self._query = query
+ self._fragment = fragment
+ self._cache = cache
+ return self
+
+
+@lru_cache
+def pre_encoded_url(url_str: str) -> "URL":
+ """Parse pre-encoded URL."""
+ self = object.__new__(URL)
+ val = split_url(url_str)
+ self._scheme, self._netloc, self._path, self._query, self._fragment = val
+ self._cache = {}
+ return self
+
+
+@lru_cache
+def build_pre_encoded_url(
+ scheme: str,
+ authority: str,
+ user: Union[str, None],
+ password: Union[str, None],
+ host: str,
+ port: Union[int, None],
+ path: str,
+ query_string: str,
+ fragment: str,
+) -> "URL":
+ """Build a pre-encoded URL from parts."""
+ self = object.__new__(URL)
+ self._scheme = scheme
+ if authority:
+ self._netloc = authority
+ elif host:
+ if port is not None:
+ port = None if port == DEFAULT_PORTS.get(scheme) else port
+ if user is None and password is None:
+ self._netloc = host if port is None else f"{host}:{port}"
+ else:
+ self._netloc = make_netloc(user, password, host, port)
+ else:
+ self._netloc = ""
+ self._path = path
+ self._query = query_string
+ self._fragment = fragment
+ self._cache = {}
+ return self
+
+
+def from_parts_uncached(
+ scheme: str, netloc: str, path: str, query: str, fragment: str
+) -> "URL":
+ """Create a new URL from parts."""
+ self = object.__new__(URL)
+ self._scheme = scheme
+ self._netloc = netloc
+ self._path = path
+ self._query = query
+ self._fragment = fragment
+ self._cache = {}
+ return self
+
+
+from_parts = lru_cache(from_parts_uncached)
+
+
+@rewrite_module
+class URL:
+ # Don't derive from str
+ # follow pathlib.Path design
+ # probably URL will not suffer from pathlib problems:
+ # it's intended for libraries like aiohttp,
+ # not to be passed into standard library functions like os.open etc.
+
+ # URL grammar (RFC 3986)
+ # pct-encoded = "%" HEXDIG HEXDIG
+ # reserved = gen-delims / sub-delims
+ # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
+ # sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
+ # / "*" / "+" / "," / ";" / "="
+ # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
+ # URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
+ # hier-part = "//" authority path-abempty
+ # / path-absolute
+ # / path-rootless
+ # / path-empty
+ # scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
+ # authority = [ userinfo "@" ] host [ ":" port ]
+ # userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
+ # host = IP-literal / IPv4address / reg-name
+ # IP-literal = "[" ( IPv6address / IPvFuture ) "]"
+ # IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" )
+ # IPv6address = 6( h16 ":" ) ls32
+ # / "::" 5( h16 ":" ) ls32
+ # / [ h16 ] "::" 4( h16 ":" ) ls32
+ # / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+ # / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+ # / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+ # / [ *4( h16 ":" ) h16 ] "::" ls32
+ # / [ *5( h16 ":" ) h16 ] "::" h16
+ # / [ *6( h16 ":" ) h16 ] "::"
+ # ls32 = ( h16 ":" h16 ) / IPv4address
+ # ; least-significant 32 bits of address
+ # h16 = 1*4HEXDIG
+ # ; 16 bits of address represented in hexadecimal
+ # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
+ # dec-octet = DIGIT ; 0-9
+ # / %x31-39 DIGIT ; 10-99
+ # / "1" 2DIGIT ; 100-199
+ # / "2" %x30-34 DIGIT ; 200-249
+ # / "25" %x30-35 ; 250-255
+ # reg-name = *( unreserved / pct-encoded / sub-delims )
+ # port = *DIGIT
+ # path = path-abempty ; begins with "/" or is empty
+ # / path-absolute ; begins with "/" but not "//"
+ # / path-noscheme ; begins with a non-colon segment
+ # / path-rootless ; begins with a segment
+ # / path-empty ; zero characters
+ # path-abempty = *( "/" segment )
+ # path-absolute = "/" [ segment-nz *( "/" segment ) ]
+ # path-noscheme = segment-nz-nc *( "/" segment )
+ # path-rootless = segment-nz *( "/" segment )
+ # path-empty = 0<pchar>
+ # segment = *pchar
+ # segment-nz = 1*pchar
+ # segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
+ # ; non-zero-length segment without any colon ":"
+ # pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
+ # query = *( pchar / "/" / "?" )
+ # fragment = *( pchar / "/" / "?" )
+ # URI-reference = URI / relative-ref
+ # relative-ref = relative-part [ "?" query ] [ "#" fragment ]
+ # relative-part = "//" authority path-abempty
+ # / path-absolute
+ # / path-noscheme
+ # / path-empty
+ # absolute-URI = scheme ":" hier-part [ "?" query ]
+ __slots__ = ("_cache", "_scheme", "_netloc", "_path", "_query", "_fragment")
+
+ _cache: _InternalURLCache
+ _scheme: str
+ _netloc: str
+ _path: str
+ _query: str
+ _fragment: str
+
+ def __new__(
+ cls,
+ val: Union[str, SplitResult, "URL", UndefinedType] = UNDEFINED,
+ *,
+ encoded: bool = False,
+ strict: Union[bool, None] = None,
+ ) -> "URL":
+ if strict is not None: # pragma: no cover
+ warnings.warn("strict parameter is ignored")
+ if type(val) is str:
+ return pre_encoded_url(val) if encoded else encode_url(val)
+ if type(val) is cls:
+ return val
+ if type(val) is SplitResult:
+ if not encoded:
+ raise ValueError("Cannot apply decoding to SplitResult")
+ return from_parts(*val)
+ if isinstance(val, str):
+ return pre_encoded_url(str(val)) if encoded else encode_url(str(val))
+ if val is UNDEFINED:
+ # Special case for UNDEFINED since it might be unpickling and we do
+ # not want to cache as the `__set_state__` call would mutate the URL
+ # object in the `pre_encoded_url` or `encoded_url` caches.
+ self = object.__new__(URL)
+ self._scheme = self._netloc = self._path = self._query = self._fragment = ""
+ self._cache = {}
+ return self
+ raise TypeError("Constructor parameter should be str")
+
+ @classmethod
+ def build(
+ cls,
+ *,
+ scheme: str = "",
+ authority: str = "",
+ user: Union[str, None] = None,
+ password: Union[str, None] = None,
+ host: str = "",
+ port: Union[int, None] = None,
+ path: str = "",
+ query: Union[Query, None] = None,
+ query_string: str = "",
+ fragment: str = "",
+ encoded: bool = False,
+ ) -> "URL":
+ """Creates and returns a new URL"""
+
+ if authority and (user or password or host or port):
+ raise ValueError(
+ 'Can\'t mix "authority" with "user", "password", "host" or "port".'
+ )
+ if port is not None and not isinstance(port, int):
+ raise TypeError(f"The port is required to be int, got {type(port)!r}.")
+ if port and not host:
+ raise ValueError('Can\'t build URL with "port" but without "host".')
+ if query and query_string:
+ raise ValueError('Only one of "query" or "query_string" should be passed')
+ if (
+ scheme is None # type: ignore[redundant-expr]
+ or authority is None # type: ignore[redundant-expr]
+ or host is None # type: ignore[redundant-expr]
+ or path is None # type: ignore[redundant-expr]
+ or query_string is None # type: ignore[redundant-expr]
+ or fragment is None
+ ):
+ raise TypeError(
+ 'NoneType is illegal for "scheme", "authority", "host", "path", '
+ '"query_string", and "fragment" args, use empty string instead.'
+ )
+
+ if query:
+ query_string = get_str_query(query) or ""
+
+ if encoded:
+ return build_pre_encoded_url(
+ scheme,
+ authority,
+ user,
+ password,
+ host,
+ port,
+ path,
+ query_string,
+ fragment,
+ )
+
+ self = object.__new__(URL)
+ self._scheme = scheme
+ _host: Union[str, None] = None
+ if authority:
+ user, password, _host, port = split_netloc(authority)
+ _host = _encode_host(_host, validate_host=False) if _host else ""
+ elif host:
+ _host = _encode_host(host, validate_host=True)
+ else:
+ self._netloc = ""
+
+ if _host is not None:
+ if port is not None:
+ port = None if port == DEFAULT_PORTS.get(scheme) else port
+ if user is None and password is None:
+ self._netloc = _host if port is None else f"{_host}:{port}"
+ else:
+ self._netloc = make_netloc(user, password, _host, port, True)
+
+ path = PATH_QUOTER(path) if path else path
+ if path and self._netloc:
+ if "." in path:
+ path = normalize_path(path)
+ if path[0] != "/":
+ msg = (
+ "Path in a URL with authority should "
+ "start with a slash ('/') if set"
+ )
+ raise ValueError(msg)
+
+ self._path = path
+ if not query and query_string:
+ query_string = QUERY_QUOTER(query_string)
+ self._query = query_string
+ self._fragment = FRAGMENT_QUOTER(fragment) if fragment else fragment
+ self._cache = {}
+ return self
+
+ def __init_subclass__(cls) -> NoReturn:
+ raise TypeError(f"Inheriting a class {cls!r} from URL is forbidden")
+
+ def __str__(self) -> str:
+ if not self._path and self._netloc and (self._query or self._fragment):
+ path = "/"
+ else:
+ path = self._path
+ if (port := self.explicit_port) is not None and port == DEFAULT_PORTS.get(
+ self._scheme
+ ):
+ # port normalization - using None for default ports to remove from rendering
+ # https://datatracker.ietf.org/doc/html/rfc3986.html#section-6.2.3
+ host = self.host_subcomponent
+ netloc = make_netloc(self.raw_user, self.raw_password, host, None)
+ else:
+ netloc = self._netloc
+ return unsplit_result(self._scheme, netloc, path, self._query, self._fragment)
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}('{str(self)}')"
+
+ def __bytes__(self) -> bytes:
+ return str(self).encode("ascii")
+
+ def __eq__(self, other: object) -> bool:
+ if type(other) is not URL:
+ return NotImplemented
+
+ path1 = "/" if not self._path and self._netloc else self._path
+ path2 = "/" if not other._path and other._netloc else other._path
+ return (
+ self._scheme == other._scheme
+ and self._netloc == other._netloc
+ and path1 == path2
+ and self._query == other._query
+ and self._fragment == other._fragment
+ )
+
+ def __hash__(self) -> int:
+ if (ret := self._cache.get("hash")) is None:
+ path = "/" if not self._path and self._netloc else self._path
+ ret = self._cache["hash"] = hash(
+ (self._scheme, self._netloc, path, self._query, self._fragment)
+ )
+ return ret
+
+ def __le__(self, other: object) -> bool:
+ if type(other) is not URL:
+ return NotImplemented
+ return self._val <= other._val
+
+ def __lt__(self, other: object) -> bool:
+ if type(other) is not URL:
+ return NotImplemented
+ return self._val < other._val
+
+ def __ge__(self, other: object) -> bool:
+ if type(other) is not URL:
+ return NotImplemented
+ return self._val >= other._val
+
+ def __gt__(self, other: object) -> bool:
+ if type(other) is not URL:
+ return NotImplemented
+ return self._val > other._val
+
+ def __truediv__(self, name: str) -> "URL":
+ if not isinstance(name, str):
+ return NotImplemented # type: ignore[unreachable]
+ return self._make_child((str(name),))
+
+ def __mod__(self, query: Query) -> "URL":
+ return self.update_query(query)
+
+ def __bool__(self) -> bool:
+ return bool(self._netloc or self._path or self._query or self._fragment)
+
+ def __getstate__(self) -> tuple[SplitResult]:
+ return (tuple.__new__(SplitResult, self._val),)
+
    def __setstate__(
        self, state: Union[tuple[SplitURLType], tuple[None, _InternalURLCache]]
    ) -> None:
        """Restore from pickled state.

        Accepts both the legacy default-style payload ``(None, instance_dict)``
        and the tuple produced by ``__getstate__``.
        """
        if state[0] is None and isinstance(state[1], dict):
            # default style pickle
            val = state[1]["_val"]
        else:
            unused: list[object]
            val, *unused = state
        self._scheme, self._netloc, self._path, self._query, self._fragment = val
        # Drop cached derived values; they are lazily recomputed on access.
        self._cache = {}
+
+ def _cache_netloc(self) -> None:
+ """Cache the netloc parts of the URL."""
+ c = self._cache
+ split_loc = split_netloc(self._netloc)
+ c["raw_user"], c["raw_password"], c["raw_host"], c["explicit_port"] = split_loc
+
+ def is_absolute(self) -> bool:
+ """A check for absolute URLs.
+
+ Return True for absolute ones (having scheme or starting
+ with //), False otherwise.
+
+ Is is preferred to call the .absolute property instead
+ as it is cached.
+ """
+ return self.absolute
+
+ def is_default_port(self) -> bool:
+ """A check for default port.
+
+ Return True if port is default for specified scheme,
+ e.g. 'http://python.org' or 'http://python.org:80', False
+ otherwise.
+
+ Return False for relative URLs.
+
+ """
+ if (explicit := self.explicit_port) is None:
+ # If the explicit port is None, then the URL must be
+ # using the default port unless its a relative URL
+ # which does not have an implicit port / default port
+ return self._netloc != ""
+ return explicit == DEFAULT_PORTS.get(self._scheme)
+
+ def origin(self) -> "URL":
+ """Return an URL with scheme, host and port parts only.
+
+ user, password, path, query and fragment are removed.
+
+ """
+ # TODO: add a keyword-only option for keeping user/pass maybe?
+ return self._origin
+
    @cached_property
    def _val(self) -> SplitURLType:
        # Lazily built 5-tuple (scheme, netloc, path, query, fragment);
        # used for ordering comparisons and pickling.
        return (self._scheme, self._netloc, self._path, self._query, self._fragment)
+
    @cached_property
    def _origin(self) -> "URL":
        """Return an URL with scheme, host and port parts only.

        user, password, path, query and fragment are removed.
        """
        if not (netloc := self._netloc):
            raise ValueError("URL should be absolute")
        if not (scheme := self._scheme):
            raise ValueError("URL should have scheme")
        if "@" in netloc:
            # Strip the userinfo, keeping only host (bracketed for IPv6)
            # and the explicit port.
            encoded_host = self.host_subcomponent
            netloc = make_netloc(None, None, encoded_host, self.explicit_port)
        elif not self._path and not self._query and not self._fragment:
            # Already a bare origin: reuse the immutable instance.
            return self
        return from_parts(scheme, netloc, "", "", "")
+
+ def relative(self) -> "URL":
+ """Return a relative part of the URL.
+
+ scheme, user, password, host and port are removed.
+
+ """
+ if not self._netloc:
+ raise ValueError("URL should be absolute")
+ return from_parts("", "", self._path, self._query, self._fragment)
+
+ @cached_property
+ def absolute(self) -> bool:
+ """A check for absolute URLs.
+
+ Return True for absolute ones (having scheme or starting
+ with //), False otherwise.
+
+ """
+ # `netloc`` is an empty string for relative URLs
+ # Checking `netloc` is faster than checking `hostname`
+ # because `hostname` is a property that does some extra work
+ # to parse the host from the `netloc`
+ return self._netloc != ""
+
+ @cached_property
+ def scheme(self) -> str:
+ """Scheme for absolute URLs.
+
+ Empty string for relative URLs or URLs starting with //
+
+ """
+ return self._scheme
+
+ @cached_property
+ def raw_authority(self) -> str:
+ """Encoded authority part of URL.
+
+ Empty string for relative URLs.
+
+ """
+ return self._netloc
+
+ @cached_property
+ def authority(self) -> str:
+ """Decoded authority part of URL.
+
+ Empty string for relative URLs.
+
+ """
+ return make_netloc(self.user, self.password, self.host, self.port)
+
+ @cached_property
+ def raw_user(self) -> Union[str, None]:
+ """Encoded user part of URL.
+
+ None if user is missing.
+
+ """
+ # not .username
+ self._cache_netloc()
+ return self._cache["raw_user"]
+
+ @cached_property
+ def user(self) -> Union[str, None]:
+ """Decoded user part of URL.
+
+ None if user is missing.
+
+ """
+ if (raw_user := self.raw_user) is None:
+ return None
+ return UNQUOTER(raw_user)
+
+ @cached_property
+ def raw_password(self) -> Union[str, None]:
+ """Encoded password part of URL.
+
+ None if password is missing.
+
+ """
+ self._cache_netloc()
+ return self._cache["raw_password"]
+
+ @cached_property
+ def password(self) -> Union[str, None]:
+ """Decoded password part of URL.
+
+ None if password is missing.
+
+ """
+ if (raw_password := self.raw_password) is None:
+ return None
+ return UNQUOTER(raw_password)
+
+ @cached_property
+ def raw_host(self) -> Union[str, None]:
+ """Encoded host part of URL.
+
+ None for relative URLs.
+
+ When working with IPv6 addresses, use the `host_subcomponent` property instead
+ as it will return the host subcomponent with brackets.
+ """
+ # Use host instead of hostname for sake of shortness
+ # May add .hostname prop later
+ self._cache_netloc()
+ return self._cache["raw_host"]
+
+ @cached_property
+ def host(self) -> Union[str, None]:
+ """Decoded host part of URL.
+
+ None for relative URLs.
+
+ """
+ if (raw := self.raw_host) is None:
+ return None
+ if raw and raw[-1].isdigit() or ":" in raw:
+ # IP addresses are never IDNA encoded
+ return raw
+ return _idna_decode(raw)
+
+ @cached_property
+ def host_subcomponent(self) -> Union[str, None]:
+ """Return the host subcomponent part of URL.
+
+ None for relative URLs.
+
+ https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.2
+
+ `IP-literal = "[" ( IPv6address / IPvFuture ) "]"`
+
+ Examples:
+ - `http://example.com:8080` -> `example.com`
+ - `http://example.com:80` -> `example.com`
+ - `https://127.0.0.1:8443` -> `127.0.0.1`
+ - `https://[::1]:8443` -> `[::1]`
+ - `http://[::1]` -> `[::1]`
+
+ """
+ if (raw := self.raw_host) is None:
+ return None
+ return f"[{raw}]" if ":" in raw else raw
+
    @cached_property
    def host_port_subcomponent(self) -> Union[str, None]:
        """Return the host and port subcomponent part of URL.

        Trailing dots are removed from the host part.

        This value is suitable for use in the Host header of an HTTP request.

        None for relative URLs.

        https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.2
        `IP-literal = "[" ( IPv6address / IPvFuture ) "]"`
        https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.3
        port = *DIGIT

        Examples:
        - `http://example.com:8080` -> `example.com:8080`
        - `http://example.com:80` -> `example.com`
        - `http://example.com.:80` -> `example.com`
        - `https://127.0.0.1:8443` -> `127.0.0.1:8443`
        - `https://[::1]:8443` -> `[::1]:8443`
        - `http://[::1]` -> `[::1]`

        """
        if (raw := self.raw_host) is None:
            return None
        if raw[-1] == ".":
            # Remove all trailing dots from the netloc as while
            # they are valid FQDNs in DNS, TLS validation fails.
            # See https://github.com/aio-libs/aiohttp/issues/3636.
            # To avoid string manipulation we only call rstrip if
            # the last character is a dot.
            raw = raw.rstrip(".")
        port = self.explicit_port
        # Default ports are omitted, matching how __str__ renders the URL.
        if port is None or port == DEFAULT_PORTS.get(self._scheme):
            return f"[{raw}]" if ":" in raw else raw
        return f"[{raw}]:{port}" if ":" in raw else f"{raw}:{port}"
+
+ @cached_property
+ def port(self) -> Union[int, None]:
+ """Port part of URL, with scheme-based fallback.
+
+ None for relative URLs or URLs without explicit port and
+ scheme without default port substitution.
+
+ """
+ if (explicit_port := self.explicit_port) is not None:
+ return explicit_port
+ return DEFAULT_PORTS.get(self._scheme)
+
+ @cached_property
+ def explicit_port(self) -> Union[int, None]:
+ """Port part of URL, without scheme-based fallback.
+
+ None for relative URLs or URLs without explicit port.
+
+ """
+ self._cache_netloc()
+ return self._cache["explicit_port"]
+
+ @cached_property
+ def raw_path(self) -> str:
+ """Encoded path of URL.
+
+ / for absolute URLs without path part.
+
+ """
+ return self._path if self._path or not self._netloc else "/"
+
+ @cached_property
+ def path(self) -> str:
+ """Decoded path of URL.
+
+ / for absolute URLs without path part.
+
+ """
+ return PATH_UNQUOTER(self._path) if self._path else "/" if self._netloc else ""
+
+ @cached_property
+ def path_safe(self) -> str:
+ """Decoded path of URL.
+
+ / for absolute URLs without path part.
+
+ / (%2F) and % (%25) are not decoded
+
+ """
+ if self._path:
+ return PATH_SAFE_UNQUOTER(self._path)
+ return "/" if self._netloc else ""
+
+ @cached_property
+ def _parsed_query(self) -> list[tuple[str, str]]:
+ """Parse query part of URL."""
+ return query_to_pairs(self._query)
+
+ @cached_property
+ def query(self) -> "MultiDictProxy[str]":
+ """A MultiDictProxy representing parsed query parameters in decoded
+ representation.
+
+ Empty value if URL has no query part.
+
+ """
+ return MultiDictProxy(MultiDict(self._parsed_query))
+
+ @cached_property
+ def raw_query_string(self) -> str:
+ """Encoded query part of URL.
+
+ Empty string if query is missing.
+
+ """
+ return self._query
+
+ @cached_property
+ def query_string(self) -> str:
+ """Decoded query part of URL.
+
+ Empty string if query is missing.
+
+ """
+ return QS_UNQUOTER(self._query) if self._query else ""
+
+ @cached_property
+ def path_qs(self) -> str:
+ """Decoded path of URL with query."""
+ return self.path if not (q := self.query_string) else f"{self.path}?{q}"
+
+ @cached_property
+ def raw_path_qs(self) -> str:
+ """Encoded path of URL with query."""
+ if q := self._query:
+ return f"{self._path}?{q}" if self._path or not self._netloc else f"/?{q}"
+ return self._path if self._path or not self._netloc else "/"
+
+ @cached_property
+ def raw_fragment(self) -> str:
+ """Encoded fragment part of URL.
+
+ Empty string if fragment is missing.
+
+ """
+ return self._fragment
+
+ @cached_property
+ def fragment(self) -> str:
+ """Decoded fragment part of URL.
+
+ Empty string if fragment is missing.
+
+ """
+ return UNQUOTER(self._fragment) if self._fragment else ""
+
+ @cached_property
+ def raw_parts(self) -> tuple[str, ...]:
+ """A tuple containing encoded *path* parts.
+
+ ('/',) for absolute URLs if *path* is missing.
+
+ """
+ path = self._path
+ if self._netloc:
+ return ("/", *path[1:].split("/")) if path else ("/",)
+ if path and path[0] == "/":
+ return ("/", *path[1:].split("/"))
+ return tuple(path.split("/"))
+
+ @cached_property
+ def parts(self) -> tuple[str, ...]:
+ """A tuple containing decoded *path* parts.
+
+ ('/',) for absolute URLs if *path* is missing.
+
+ """
+ return tuple(UNQUOTER(part) for part in self.raw_parts)
+
+ @cached_property
+ def parent(self) -> "URL":
+ """A new URL with last part of path removed and cleaned up query and
+ fragment.
+
+ """
+ path = self._path
+ if not path or path == "/":
+ if self._fragment or self._query:
+ return from_parts(self._scheme, self._netloc, path, "", "")
+ return self
+ parts = path.split("/")
+ return from_parts(self._scheme, self._netloc, "/".join(parts[:-1]), "", "")
+
+ @cached_property
+ def raw_name(self) -> str:
+ """The last part of raw_parts."""
+ parts = self.raw_parts
+ if not self._netloc:
+ return parts[-1]
+ parts = parts[1:]
+ return parts[-1] if parts else ""
+
+ @cached_property
+ def name(self) -> str:
+ """The last part of parts."""
+ return UNQUOTER(self.raw_name)
+
+ @cached_property
+ def raw_suffix(self) -> str:
+ name = self.raw_name
+ i = name.rfind(".")
+ return name[i:] if 0 < i < len(name) - 1 else ""
+
+ @cached_property
+ def suffix(self) -> str:
+ return UNQUOTER(self.raw_suffix)
+
+ @cached_property
+ def raw_suffixes(self) -> tuple[str, ...]:
+ name = self.raw_name
+ if name.endswith("."):
+ return ()
+ name = name.lstrip(".")
+ return tuple("." + suffix for suffix in name.split(".")[1:])
+
+ @cached_property
+ def suffixes(self) -> tuple[str, ...]:
+ return tuple(UNQUOTER(suffix) for suffix in self.raw_suffixes)
+
    def _make_child(self, paths: "Sequence[str]", encoded: bool = False) -> "URL":
        """Append *paths* to ``self._path``.

        Accounts for absolute vs relative bases; keeps existing empty
        segments but does not create new ones. Segments are accumulated
        in reverse order and flipped once at the end.
        """
        parsed: list[str] = []
        needs_normalize: bool = False
        for idx, path in enumerate(reversed(paths)):
            # empty segment of last is not removed
            last = idx == 0
            if path and path[0] == "/":
                raise ValueError(
                    f"Appending path {path!r} starting from slash is forbidden"
                )
            # We need to quote the path if it is not already encoded
            # This cannot be done at the end because the existing
            # path is already quoted and we do not want to double quote
            # the existing path.
            path = path if encoded else PATH_QUOTER(path)
            # "." anywhere means dot-segments may need resolving later.
            needs_normalize |= "." in path
            segments = path.split("/")
            segments.reverse()
            # remove trailing empty segment for all but the last path
            parsed += segments[1:] if not last and segments[0] == "" else segments

        if (path := self._path) and (old_segments := path.split("/")):
            # If the old path ends with a slash, the last segment is an empty string
            # and should be removed before adding the new path segments.
            old = old_segments[:-1] if old_segments[-1] == "" else old_segments
            old.reverse()
            parsed += old

        # If the netloc is present, inject a leading slash when adding a
        # path to an absolute URL where there was none before.
        if (netloc := self._netloc) and parsed and parsed[-1] != "":
            parsed.append("")

        parsed.reverse()
        if not netloc or not needs_normalize:
            return from_parts(self._scheme, netloc, "/".join(parsed), "", "")

        path = "/".join(normalize_path_segments(parsed))
        # If normalizing the path segments removed the leading slash, add it back.
        if path and path[0] != "/":
            path = f"/{path}"
        return from_parts(self._scheme, netloc, path, "", "")
+
+ def with_scheme(self, scheme: str) -> "URL":
+ """Return a new URL with scheme replaced."""
+ # N.B. doesn't cleanup query/fragment
+ if not isinstance(scheme, str):
+ raise TypeError("Invalid scheme type")
+ lower_scheme = scheme.lower()
+ netloc = self._netloc
+ if not netloc and lower_scheme in SCHEME_REQUIRES_HOST:
+ msg = (
+ "scheme replacement is not allowed for "
+ f"relative URLs for the {lower_scheme} scheme"
+ )
+ raise ValueError(msg)
+ return from_parts(lower_scheme, netloc, self._path, self._query, self._fragment)
+
    def with_user(self, user: Union[str, None]) -> "URL":
        """Return a new URL with user replaced.

        Autoencode user if needed.

        Clear user/password if user is None.

        """
        # N.B. doesn't cleanup query/fragment
        if user is None:
            # Removing the user also removes the password.
            password = None
        elif isinstance(user, str):
            user = QUOTER(user)
            password = self.raw_password
        else:
            raise TypeError("Invalid user type")
        if not (netloc := self._netloc):
            raise ValueError("user replacement is not allowed for relative URLs")
        # Rebuild the netloc around the already-encoded host and port.
        encoded_host = self.host_subcomponent or ""
        netloc = make_netloc(user, password, encoded_host, self.explicit_port)
        return from_parts(self._scheme, netloc, self._path, self._query, self._fragment)
+
    def with_password(self, password: Union[str, None]) -> "URL":
        """Return a new URL with password replaced.

        Autoencode password if needed.

        Clear password if argument is None.

        """
        # N.B. doesn't cleanup query/fragment
        if password is None:
            pass
        elif isinstance(password, str):
            password = QUOTER(password)
        else:
            raise TypeError("Invalid password type")
        if not (netloc := self._netloc):
            raise ValueError("password replacement is not allowed for relative URLs")
        # Keep the existing user; only the password slot changes.
        encoded_host = self.host_subcomponent or ""
        port = self.explicit_port
        netloc = make_netloc(self.raw_user, password, encoded_host, port)
        return from_parts(self._scheme, netloc, self._path, self._query, self._fragment)
+
    def with_host(self, host: str) -> "URL":
        """Return a new URL with host replaced.

        Autoencode host if needed.

        Changing host for relative URLs is not allowed, use .join()
        instead.

        """
        # N.B. doesn't cleanup query/fragment
        if not isinstance(host, str):
            raise TypeError("Invalid host type")
        if not (netloc := self._netloc):
            raise ValueError("host replacement is not allowed for relative URLs")
        if not host:
            raise ValueError("host removing is not allowed")
        # validate_host=True rejects characters that are illegal in a host.
        encoded_host = _encode_host(host, validate_host=True) if host else ""
        port = self.explicit_port
        netloc = make_netloc(self.raw_user, self.raw_password, encoded_host, port)
        return from_parts(self._scheme, netloc, self._path, self._query, self._fragment)
+
    def with_port(self, port: Union[int, None]) -> "URL":
        """Return a new URL with port replaced.

        Clear port to default if None is passed.

        """
        # N.B. doesn't cleanup query/fragment
        if port is not None:
            # bool is an int subclass, so reject it explicitly.
            if isinstance(port, bool) or not isinstance(port, int):
                raise TypeError(f"port should be int or None, got {type(port)}")
            if not (0 <= port <= 65535):
                raise ValueError(f"port must be between 0 and 65535, got {port}")
        if not (netloc := self._netloc):
            raise ValueError("port replacement is not allowed for relative URLs")
        encoded_host = self.host_subcomponent or ""
        netloc = make_netloc(self.raw_user, self.raw_password, encoded_host, port)
        return from_parts(self._scheme, netloc, self._path, self._query, self._fragment)
+
    def with_path(
        self,
        path: str,
        *,
        encoded: bool = False,
        keep_query: bool = False,
        keep_fragment: bool = False,
    ) -> "URL":
        """Return a new URL with path replaced.

        The path is quoted unless *encoded* is True; query and fragment
        are dropped unless *keep_query* / *keep_fragment* is set.
        """
        netloc = self._netloc
        if not encoded:
            path = PATH_QUOTER(path)
            if netloc:
                # Resolve "." / ".." segments for absolute URLs only.
                path = normalize_path(path) if "." in path else path
        if path and path[0] != "/":
            path = f"/{path}"
        query = self._query if keep_query else ""
        fragment = self._fragment if keep_fragment else ""
        return from_parts(self._scheme, netloc, path, query, fragment)
+
    @overload
    def with_query(self, query: Query) -> "URL": ...

    @overload
    def with_query(self, **kwargs: QueryVariable) -> "URL": ...

    def with_query(self, *args: Any, **kwargs: Any) -> "URL":
        """Return a new URL with query part replaced.

        Accepts any Mapping (e.g. dict, multidict.MultiDict instances)
        or str, autoencode the argument if needed.

        A sequence of (key, value) pairs is supported as well.

        It also can take an arbitrary number of keyword arguments.

        Clear query if None is passed.

        """
        # N.B. the fragment is kept as-is; only the query is replaced
        query = get_str_query(*args, **kwargs) or ""
        return from_parts_uncached(
            self._scheme, self._netloc, self._path, query, self._fragment
        )
+
    @overload
    def extend_query(self, query: Query) -> "URL": ...

    @overload
    def extend_query(self, **kwargs: QueryVariable) -> "URL": ...

    def extend_query(self, *args: Any, **kwargs: Any) -> "URL":
        """Return a new URL with query part combined with the existing.

        This method will not remove existing query parameters.

        Example:
            >>> url = URL('http://example.com/?a=1&b=2')
            >>> url.extend_query(a=3, c=4)
            URL('http://example.com/?a=1&b=2&a=3&c=4')
        """
        if not (new_query := get_str_query(*args, **kwargs)):
            # Nothing to append: reuse the immutable instance.
            return self
        if query := self._query:
            # both strings are already encoded so we can use a simple
            # string join
            query += new_query if query[-1] == "&" else f"&{new_query}"
        else:
            query = new_query
        return from_parts_uncached(
            self._scheme, self._netloc, self._path, query, self._fragment
        )
+
    @overload
    def update_query(self, query: Query) -> "URL": ...

    @overload
    def update_query(self, **kwargs: QueryVariable) -> "URL": ...

    def update_query(self, *args: Any, **kwargs: Any) -> "URL":
        """Return a new URL with query part updated.

        This method will overwrite existing query parameters.

        Example:
            >>> url = URL('http://example.com/?a=1&b=2')
            >>> url.update_query(a=3, c=4)
            URL('http://example.com/?a=3&b=2&c=4')
        """
        in_query: Union[
            str,
            Mapping[str, QueryVariable],
            Sequence[tuple[Union[str, istr], SimpleQuery]],
            None,
        ]
        if kwargs:
            if args:
                msg = "Either kwargs or single query parameter must be present"
                raise ValueError(msg)
            in_query = kwargs
        elif len(args) == 1:
            in_query = args[0]
        else:
            raise ValueError("Either kwargs or single query parameter must be present")

        if in_query is None:
            # Explicit None clears the query entirely.
            query = ""
        elif not in_query:
            # Empty input: keep the existing query unchanged.
            query = self._query
        elif isinstance(in_query, Mapping):
            qm: MultiDict[QueryVariable] = MultiDict(self._parsed_query)
            qm.update(in_query)
            query = get_str_query_from_sequence_iterable(qm.items())
        elif isinstance(in_query, str):
            qstr: MultiDict[str] = MultiDict(self._parsed_query)
            qstr.update(query_to_pairs(in_query))
            query = get_str_query_from_iterable(qstr.items())
        elif isinstance(in_query, (bytes, bytearray, memoryview)):
            # Checked before Sequence: bytes-likes are Sequences too.
            msg = "Invalid query type: bytes, bytearray and memoryview are forbidden"
            raise TypeError(msg)
        elif isinstance(in_query, Sequence):
            # We don't expect sequence values if we're given a list of pairs
            # already; only mappings like builtin `dict` which can't have the
            # same key pointing to multiple values are allowed to use
            # `_query_seq_pairs`.
            if TYPE_CHECKING:
                in_query = cast(
                    Sequence[tuple[Union[str, istr], SimpleQuery]], in_query
                )
            qs: MultiDict[SimpleQuery] = MultiDict(self._parsed_query)
            qs.update(in_query)
            query = get_str_query_from_iterable(qs.items())
        else:
            raise TypeError(
                "Invalid query type: only str, mapping or "
                "sequence of (key, value) pairs is allowed"
            )
        return from_parts_uncached(
            self._scheme, self._netloc, self._path, query, self._fragment
        )
+
+ def without_query_params(self, *query_params: str) -> "URL":
+ """Remove some keys from query part and return new URL."""
+ params_to_remove = set(query_params) & self.query.keys()
+ if not params_to_remove:
+ return self
+ return self.with_query(
+ tuple(
+ (name, value)
+ for name, value in self.query.items()
+ if name not in params_to_remove
+ )
+ )
+
+ def with_fragment(self, fragment: Union[str, None]) -> "URL":
+ """Return a new URL with fragment replaced.
+
+ Autoencode fragment if needed.
+
+ Clear fragment to default if None is passed.
+
+ """
+ # N.B. doesn't cleanup query/fragment
+ if fragment is None:
+ raw_fragment = ""
+ elif not isinstance(fragment, str):
+ raise TypeError("Invalid fragment type")
+ else:
+ raw_fragment = FRAGMENT_QUOTER(fragment)
+ if self._fragment == raw_fragment:
+ return self
+ return from_parts(
+ self._scheme, self._netloc, self._path, self._query, raw_fragment
+ )
+
    def with_name(
        self,
        name: str,
        *,
        keep_query: bool = False,
        keep_fragment: bool = False,
    ) -> "URL":
        """Return a new URL with name (last part of path) replaced.

        Query and fragment parts are cleaned up.

        Name is encoded if needed.

        """
        # N.B. DOES cleanup query/fragment
        if not isinstance(name, str):
            raise TypeError("Invalid name type")
        if "/" in name:
            raise ValueError("Slash in name is not allowed")
        name = PATH_QUOTER(name)
        if name in (".", ".."):
            raise ValueError(". and .. values are forbidden")
        parts = list(self.raw_parts)
        if netloc := self._netloc:
            # Absolute URL: parts[0] is the "/" root marker.
            if len(parts) == 1:
                parts.append(name)
            else:
                parts[-1] = name
            parts[0] = ""  # replace leading '/'
        else:
            parts[-1] = name
            if parts[0] == "/":
                parts[0] = ""  # replace leading '/'

        query = self._query if keep_query else ""
        fragment = self._fragment if keep_fragment else ""
        return from_parts(self._scheme, netloc, "/".join(parts), query, fragment)
+
    def with_suffix(
        self,
        suffix: str,
        *,
        keep_query: bool = False,
        keep_fragment: bool = False,
    ) -> "URL":
        """Return a new URL with suffix (file extension of name) replaced.

        Query and fragment parts are cleaned up.

        suffix is encoded if needed.
        """
        if not isinstance(suffix, str):
            raise TypeError("Invalid suffix type")
        # Valid suffixes start with "." and are neither "." alone nor
        # contain a path separator.
        if suffix and not suffix[0] == "." or suffix == "." or "/" in suffix:
            raise ValueError(f"Invalid suffix {suffix!r}")
        name = self.raw_name
        if not name:
            raise ValueError(f"{self!r} has an empty name")
        old_suffix = self.raw_suffix
        suffix = PATH_QUOTER(suffix)
        # Replace the old suffix when present, otherwise just append.
        name = name + suffix if not old_suffix else name[: -len(old_suffix)] + suffix
        if name in (".", ".."):
            raise ValueError(". and .. values are forbidden")
        parts = list(self.raw_parts)
        if netloc := self._netloc:
            if len(parts) == 1:
                parts.append(name)
            else:
                parts[-1] = name
            parts[0] = ""  # replace leading '/'
        else:
            parts[-1] = name
            if parts[0] == "/":
                parts[0] = ""  # replace leading '/'

        query = self._query if keep_query else ""
        fragment = self._fragment if keep_fragment else ""
        return from_parts(self._scheme, netloc, "/".join(parts), query, fragment)
+
    def join(self, url: "URL") -> "URL":
        """Join URLs

        Construct a full (“absolute”) URL by combining a “base URL”
        (self) with another URL (url).

        Informally, this uses components of the base URL, in
        particular the addressing scheme, the network location and
        (part of) the path, to provide missing components in the
        relative URL.

        """
        if type(url) is not URL:
            raise TypeError("url should be URL")

        scheme = url._scheme or self._scheme
        if scheme != self._scheme or scheme not in USES_RELATIVE:
            # Different (or non-relative) scheme: the other URL wins outright.
            return url

        # scheme is in uses_authority as uses_authority is a superset of uses_relative
        if (join_netloc := url._netloc) and scheme in USES_AUTHORITY:
            return from_parts(scheme, join_netloc, url._path, url._query, url._fragment)

        orig_path = self._path
        if join_path := url._path:
            if join_path[0] == "/":
                # Rooted path replaces the base path entirely.
                path = join_path
            elif not orig_path:
                path = f"/{join_path}"
            elif orig_path[-1] == "/":
                path = f"{orig_path}{join_path}"
            else:
                # Merge: replace the base path's final segment with the
                # relative path. parts[0] is "/" for absolute URLs, so
                # this join produces a leading double slash...
                path = "/".join([*self.parts[:-1], ""]) + join_path
                # ...which has to be removed
                if orig_path[0] == "/":
                    path = path[1:]
                path = normalize_path(path) if "." in path else path
        else:
            path = orig_path

        return from_parts(
            scheme,
            self._netloc,
            path,
            url._query if join_path or url._query else self._query,
            url._fragment if join_path or url._fragment else self._fragment,
        )
+
+ def joinpath(self, *other: str, encoded: bool = False) -> "URL":
+ """Return a new URL with the elements in other appended to the path."""
+ return self._make_child(other, encoded=encoded)
+
    def human_repr(self) -> str:
        """Return decoded human readable string for URL representation."""
        # Quote only characters that would be ambiguous in each component,
        # keeping everything else readable.
        user = human_quote(self.user, "#/:?@[]")
        password = human_quote(self.password, "#/:?@[]")
        if (host := self.host) and ":" in host:
            # Re-bracket IPv6 literals so the port separator stays unambiguous.
            host = f"[{host}]"
        path = human_quote(self.path, "#?")
        if TYPE_CHECKING:
            assert path is not None
        query_string = "&".join(
            "{}={}".format(human_quote(k, "#&+;="), human_quote(v, "#&+;="))
            for k, v in self.query.items()
        )
        fragment = human_quote(self.fragment, "")
        if TYPE_CHECKING:
            assert fragment is not None
        netloc = make_netloc(user, password, host, self.explicit_port)
        return unsplit_result(self._scheme, netloc, path, query_string, fragment)
+
+
# Default LRU sizes for the IDNA codec caches and the host-encoding cache
# defined below; tunable at runtime via cache_configure().
_DEFAULT_IDNA_SIZE = 256
_DEFAULT_ENCODE_SIZE = 512
+
+
@lru_cache(_DEFAULT_IDNA_SIZE)
def _idna_decode(raw: str) -> str:
    """Decode an IDNA/punycode host to its Unicode form (LRU cached)."""
    try:
        return idna.decode(raw.encode("ascii"))
    except UnicodeError:  # e.g. '::1'
        # Fall back to the stdlib "idna" codec for inputs the idna
        # package rejects.
        return raw.encode("ascii").decode("idna")
+
+
@lru_cache(_DEFAULT_IDNA_SIZE)
def _idna_encode(host: str) -> str:
    """Encode a Unicode host to ASCII via IDNA/UTS46 (LRU cached)."""
    try:
        return idna.encode(host, uts46=True).decode("ascii")
    except UnicodeError:
        # Fall back to the stdlib "idna" codec when the idna package fails.
        return host.encode("idna").decode("ascii")
+
+
@lru_cache(_DEFAULT_ENCODE_SIZE)
def _encode_host(host: str, validate_host: bool) -> str:
    """Encode host part of URL.

    *host* may be a registered name, an IPv4/IPv6 literal, or an IPv6
    literal with a ``%zone`` suffix. When *validate_host* is true,
    ASCII reg-names are checked against NOT_REG_NAME and rejected on
    illegal characters.
    """
    # If the host ends with a digit or contains a colon, its likely
    # an IP address.
    if host and (host[-1].isdigit() or ":" in host):
        raw_ip, sep, zone = host.partition("%")
        # If it looks like an IP, we check with _ip_compressed_version
        # and fall-through if its not an IP address. This is a performance
        # optimization to avoid parsing IP addresses as much as possible
        # because it is orders of magnitude slower than almost any other
        # operation this library does.
        # Might be an IP address, check it
        #
        # IP Addresses can look like:
        # https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.2
        # - 127.0.0.1 (last character is a digit)
        # - 2001:db8::ff00:42:8329 (contains a colon)
        # - 2001:db8::ff00:42:8329%eth0 (contains a colon)
        # - [2001:db8::ff00:42:8329] (contains a colon -- brackets should
        #   have been removed before it gets here)
        # Rare IP Address formats are not supported per:
        # https://datatracker.ietf.org/doc/html/rfc3986#section-7.4
        #
        # IP parsing is slow, so its wrapped in an LRU
        try:
            ip = ip_address(raw_ip)
        except ValueError:
            pass
        else:
            # These checks should not happen in the
            # LRU to keep the cache size small
            host = ip.compressed
            if ip.version == 6:
                # IPv6 literals are bracketed; any zone id is kept after "%".
                return f"[{host}%{zone}]" if sep else f"[{host}]"
            return f"{host}%{zone}" if sep else host

    # IDNA encoding is slow, skip it for ASCII-only strings
    if host.isascii():
        # Check for invalid characters explicitly; _idna_encode() does this
        # for non-ascii host names.
        host = host.lower()
        if validate_host and (invalid := NOT_REG_NAME.search(host)):
            value, pos, extra = invalid.group(), invalid.start(), ""
            if value == "@" or (value == ":" and "@" in host[pos:]):
                # this looks like an authority string
                extra = (
                    ", if the value includes a username or password, "
                    "use 'authority' instead of 'host'"
                )
            raise ValueError(
                f"Host {host!r} cannot contain {value!r} (at position {pos}){extra}"
            ) from None
        return host

    return _idna_encode(host)
+
+
@rewrite_module
def cache_clear() -> None:
    """Clear all LRU caches."""
    for cached in (_idna_encode, _idna_decode, _encode_host):
        cached.cache_clear()
+
+
@rewrite_module
def cache_info() -> CacheInfo:
    """Report cache statistics."""
    return {
        "idna_encode": _idna_encode.cache_info(),
        "idna_decode": _idna_decode.cache_info(),
        # "ip_address" and "host_validate" are kept for backwards
        # compatibility; both now alias the merged _encode_host cache.
        "ip_address": _encode_host.cache_info(),
        "host_validate": _encode_host.cache_info(),
        "encode_host": _encode_host.cache_info(),
    }
+
+
@rewrite_module
def cache_configure(
    *,
    idna_encode_size: Union[int, None] = _DEFAULT_IDNA_SIZE,
    idna_decode_size: Union[int, None] = _DEFAULT_IDNA_SIZE,
    ip_address_size: Union[int, None, UndefinedType] = UNDEFINED,
    host_validate_size: Union[int, None, UndefinedType] = UNDEFINED,
    encode_host_size: Union[int, None, UndefinedType] = UNDEFINED,
) -> None:
    """Configure LRU cache sizes.

    A size of None makes the corresponding cache unbounded. The legacy
    ip_address_size/host_validate_size arguments are folded into
    encode_host_size and emit a DeprecationWarning.
    """
    global _idna_decode, _idna_encode, _encode_host
    # ip_address_size, host_validate_size are no longer
    # used, but are kept for backwards compatibility.
    if ip_address_size is not UNDEFINED or host_validate_size is not UNDEFINED:
        warnings.warn(
            "cache_configure() no longer accepts the "
            "ip_address_size or host_validate_size arguments, "
            "they are used to set the encode_host_size instead "
            "and will be removed in the future",
            DeprecationWarning,
            stacklevel=2,
        )

    if encode_host_size is not None:
        # Collapse the legacy sizes into encode_host_size: None (unbounded)
        # wins outright, otherwise the largest requested size is used.
        for size in (ip_address_size, host_validate_size):
            if size is None:
                encode_host_size = None
            elif encode_host_size is UNDEFINED:
                if size is not UNDEFINED:
                    encode_host_size = size
            elif size is not UNDEFINED:
                if TYPE_CHECKING:
                    assert isinstance(size, int)
                    assert isinstance(encode_host_size, int)
                encode_host_size = max(size, encode_host_size)
        if encode_host_size is UNDEFINED:
            encode_host_size = _DEFAULT_ENCODE_SIZE

    # Rebuild each cached function around its undecorated __wrapped__ form.
    _encode_host = lru_cache(encode_host_size)(_encode_host.__wrapped__)
    _idna_decode = lru_cache(idna_decode_size)(_idna_decode.__wrapped__)
    _idna_encode = lru_cache(idna_encode_size)(_idna_encode.__wrapped__)
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/py.typed" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/py.typed"
new file mode 100644
index 0000000..dcf2c80
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Lib/site-packages/yarl/py.typed"
@@ -0,0 +1 @@
+# Placeholder
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Scripts/normalizer.exe" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Scripts/normalizer.exe"
new file mode 100644
index 0000000..10c4bc1
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Scripts/normalizer.exe"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Scripts/tqdm.exe" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Scripts/tqdm.exe"
new file mode 100644
index 0000000..1c4d562
--- /dev/null
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/Change_password/venv_build/Scripts/tqdm.exe"
Binary files differ
diff --git "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/\351\200\240\346\225\260\350\204\232\346\234\2542/\345\215\216\344\270\234\345\270\210\350\214\203\345\244\247\345\255\246\344\272\214\346\234\237/\345\271\266\345\217\221\345\205\245\351\251\273\347\254\274\344\275\215.py" "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/\351\200\240\346\225\260\350\204\232\346\234\2542/\345\215\216\344\270\234\345\270\210\350\214\203\345\244\247\345\255\246\344\272\214\346\234\237/\345\271\266\345\217\221\345\205\245\351\251\273\347\254\274\344\275\215.py"
index 5beabfc..923f493 100644
--- "a/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/\351\200\240\346\225\260\350\204\232\346\234\2542/\345\215\216\344\270\234\345\270\210\350\214\203\345\244\247\345\255\246\344\272\214\346\234\237/\345\271\266\345\217\221\345\205\245\351\251\273\347\254\274\344\275\215.py"
+++ "b/\346\265\213\350\257\225\347\273\204/\350\204\232\346\234\254/\351\200\240\346\225\260\350\204\232\346\234\2542/\345\215\216\344\270\234\345\270\210\350\214\203\345\244\247\345\255\246\344\272\214\346\234\237/\345\271\266\345\217\221\345\205\245\351\251\273\347\254\274\344\275\215.py"
@@ -42,12 +42,12 @@
apiname = "入驻笼位"
url = "http://192.168.6.190:5561/api/base/cage/cage/enterCage"
headers = {
- "token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NjcyMzc1MDgsInVzZXJuYW1lIjoiZ2x5In0.2N0rQ7Oy1B-Wg_fnywOrcDelYnCe5JOpd7-vwu_2H6U",
+ "token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE3NjgwMzY0MzksInVzZXJuYW1lIjoiZ2x5In0.-LYYNbSJ-zb5RKaiBiPjntgUfnGRfvajA2B1N2v7a-o",
"Content-Type": "application/json"
}
NUM_WORKERS = 100
-TOTAL_REQUESTS = 244481
+TOTAL_REQUESTS = 10000
MAX_RETRIES = 3
REQUEST_TIMEOUT = 60
OUTPUT_DIR = './load_test_report'
@@ -60,6 +60,90 @@
# 全局变量,存储从数据库获取的笼位列表
cage_list = []
+
+
+class DataManager:
+ """数据管理器,负责从数据库加载用户和课题组信息"""
+
+ def __init__(self):
+ self.user_data = [] # 存储用户ID、用户名、课题组ID和课题组名称
+
+ def load_user_and_group_data(self):
+ """从数据库加载用户和课题组信息"""
+ try:
+ conn = pymysql.connect(**DB_CONFIG)
+
+ # 获取用户ID、用户名和对应的课题组ID
+ with conn.cursor() as cursor:
+ cursor.execute("""
+ SELECT
+ su.id as user_id,
+ su.name as user_name,
+ su.research_group_ids as research_group_ids
+ FROM sys_user su
+ WHERE su.research_group_ids IS NOT NULL AND su.research_group_ids != ''
+ """)
+
+ results = cursor.fetchall()
+
+ # 处理查询结果
+ for row in results:
+ # 处理多个课题组ID(用逗号分隔的情况)
+ if row['research_group_ids'] and ',' in row['research_group_ids']:
+ group_ids = [gid.strip() for gid in row['research_group_ids'].split(',') if gid.strip()]
+ elif row['research_group_ids']:
+ group_ids = [row['research_group_ids'].strip()]
+ else:
+ continue
+
+ # 获取所有相关的课题组信息
+ group_info_list = []
+ for group_id in group_ids:
+ # 查找对应的课题组名称,使用正确的表名 l_research_group
+ cursor.execute("SELECT id, name FROM l_research_group WHERE id = %s", (group_id,))
+ group_info = cursor.fetchone()
+ if group_info:
+ group_info_list.append({
+ 'group_id': group_info['id'],
+ 'group_name': group_info['name']
+ })
+
+ if group_info_list:
+ self.user_data.append({
+ 'user_id': row['user_id'],
+ 'user_name': row['user_name'],
+ 'groups': group_info_list
+ })
+
+ print(f"成功加载 {len(self.user_data)} 个有效用户数据")
+ conn.close()
+ return len(self.user_data) > 0
+
+ except Exception as e:
+ print(f"数据库连接失败: {e}")
+ return False
+
+ def get_random_user_and_group(self):
+ """随机获取一个用户和对应的课题组信息"""
+ if not self.user_data:
+ return None, None, None, None
+
+ # 随机选择一个用户
+ user = random.choice(self.user_data)
+ user_id = user['user_id']
+ user_name = user['user_name']
+
+ # 随机选择一个课题组
+ group = random.choice(user['groups'])
+ group_id = group['group_id']
+ group_name = group['group_name']
+
+ return user_id, user_name, group_id, group_name
+
+
+# 创建数据管理器实例
+data_manager = DataManager()
+
def fetch_cages_from_db():
@@ -112,7 +196,7 @@
def create_animal_data(idx: int):
- """创建动物数据,使用动态获取的笼位信息"""
+ """创建动物数据,使用动态获取的笼位、用户和课题组信息"""
random_code = RandomUtil.generate_random_number_string(0, 999999999)
random_femaleNum = RandomUtil.generate_random_number_string(1, 5)
random_maleNum = RandomUtil.generate_random_number_string(1, 5)
@@ -120,6 +204,15 @@
# 获取笼位信息
cage_info = get_random_cage(idx)
+
+ # 从数据管理器获取随机的用户和课题组信息
+ user_id, user_name, group_id, group_name = data_manager.get_random_user_and_group()
+
+ # 如果无法获取用户和课题组信息,使用默认值
+ if not user_id or not group_id:
+ user_id = "1995379969088860162" # 默认用户ID
+ group_id = "1995379941721026561" # 默认课题组ID
+ group_name = "hyb课题组2" # 默认课题组名称
return {
"enterCageList": [
@@ -130,11 +223,11 @@
},
"cageStatus": "2", # 笼具状态
"user": {
- "id": "1995379969088860162" # 操作用户ID
+ "id": user_id # 用户ID
},
"researchGroup": {
- "id": "1995379941721026561",
- "name": "hyb课题组2" # 课题组信息
+ "id": group_id,
+ "name": group_name # 课题组信息
},
"femaleNum": random_femaleNum, # 雌性数量
"maleNum": random_maleNum, # 雄性数量
@@ -253,6 +346,11 @@
print("错误: 无法获取笼位信息,压测终止")
return
+ # 从数据库获取用户和课题组信息
+ print("正在从数据库获取用户和课题组信息...")
+ if not data_manager.load_user_and_group_data():
+ print("警告: 无法获取用户和课题组信息,将使用默认值")
+
print(f"获取到 {len(cage_list)} 个笼位,将进行{apiname}压测")
# 动态加载报告生成器模块(支持中文文件名)
--
Gitblit v1.9.1