From 742948f242523c5dd7158617a372d4f1b959ff19 Mon Sep 17 00:00:00 2001 From: Charles Holtzkampf Date: Mon, 13 Jan 2025 13:20:59 +0000 Subject: [PATCH 1/2] updated --- bin/activate | 87 + bin/activate.csh | 55 + bin/activate.fish | 103 + bin/activate.nu | 96 + bin/activate.ps1 | 61 + bin/activate_this.py | 38 + bin/filetype | 8 + bin/httpx | 8 + bin/normalizer | 8 + bin/pip | 8 + bin/pip-3.11 | 8 + bin/pip3 | 8 + bin/pip3.11 | 8 + bin/python | 1 + bin/python3 | 1 + bin/python3.11 | 1 + bin/wheel | 8 + bin/wheel-3.11 | 8 + bin/wheel3 | 8 + bin/wheel3.11 | 8 + build/lib/twikit/__init__.py | 31 + build/lib/twikit/_captcha/__init__.py | 2 + build/lib/twikit/_captcha/base.py | 111 + build/lib/twikit/_captcha/capsolver.py | 95 + build/lib/twikit/bookmark.py | 64 + build/lib/twikit/client/__init__.py | 4 + build/lib/twikit/client/client.py | 4295 ++++++++ build/lib/twikit/client/gql.py | 705 ++ build/lib/twikit/client/v11.py | 512 + build/lib/twikit/community.py | 282 + build/lib/twikit/constants.py | 260 + build/lib/twikit/errors.py | 110 + build/lib/twikit/geo.py | 82 + build/lib/twikit/group.py | 259 + build/lib/twikit/guest/__init__.py | 3 + build/lib/twikit/guest/client.py | 393 + build/lib/twikit/guest/tweet.py | 225 + build/lib/twikit/guest/user.py | 196 + build/lib/twikit/list.py | 255 + build/lib/twikit/message.py | 143 + build/lib/twikit/notification.py | 47 + build/lib/twikit/streaming.py | 269 + build/lib/twikit/trend.py | 93 + build/lib/twikit/tweet.py | 694 ++ build/lib/twikit/user.py | 521 + build/lib/twikit/utils.py | 394 + .../twikit/x_client_transaction/__init__.py | 8 + .../x_client_transaction/cubic_curve.py | 48 + .../x_client_transaction/interpolate.py | 23 + .../twikit/x_client_transaction/rotation.py | 27 + .../x_client_transaction/transaction.py | 164 + .../lib/twikit/x_client_transaction/utils.py | 84 + example_project/test_twikit.py | 18 + examples/delete_all_tweets.py | 40 - examples/dm_auto_reply.py | 41 - examples/download_tweet_media.py | 23 
- examples/example.py | 137 - examples/guest.py | 26 - examples/listen_for_new_tweets.py | 37 - .../site-packages/_distutils_hack/__init__.py | 239 + .../site-packages/_distutils_hack/override.py | 1 + lib/python3.11/site-packages/_virtualenv.pth | 1 + lib/python3.11/site-packages/_virtualenv.py | 103 + .../anyio-4.8.0.dist-info/INSTALLER | 1 + .../anyio-4.8.0.dist-info/LICENSE | 20 + .../anyio-4.8.0.dist-info/METADATA | 104 + .../anyio-4.8.0.dist-info/RECORD | 86 + .../site-packages/anyio-4.8.0.dist-info/WHEEL | 5 + .../anyio-4.8.0.dist-info/entry_points.txt | 2 + .../anyio-4.8.0.dist-info/top_level.txt | 1 + .../site-packages/anyio/__init__.py | 77 + .../site-packages/anyio/_backends/__init__.py | 0 .../site-packages/anyio/_backends/_asyncio.py | 2807 ++++++ .../site-packages/anyio/_backends/_trio.py | 1334 +++ .../site-packages/anyio/_core/__init__.py | 0 .../anyio/_core/_asyncio_selector_thread.py | 167 + .../site-packages/anyio/_core/_eventloop.py | 166 + .../site-packages/anyio/_core/_exceptions.py | 126 + .../site-packages/anyio/_core/_fileio.py | 729 ++ .../site-packages/anyio/_core/_resources.py | 18 + .../site-packages/anyio/_core/_signals.py | 27 + .../site-packages/anyio/_core/_sockets.py | 787 ++ .../site-packages/anyio/_core/_streams.py | 52 + .../anyio/_core/_subprocesses.py | 196 + .../anyio/_core/_synchronization.py | 732 ++ .../site-packages/anyio/_core/_tasks.py | 158 + .../site-packages/anyio/_core/_testing.py | 78 + .../site-packages/anyio/_core/_typedattr.py | 81 + .../site-packages/anyio/abc/__init__.py | 55 + .../site-packages/anyio/abc/_eventloop.py | 376 + .../site-packages/anyio/abc/_resources.py | 33 + .../site-packages/anyio/abc/_sockets.py | 194 + .../site-packages/anyio/abc/_streams.py | 203 + .../site-packages/anyio/abc/_subprocesses.py | 79 + .../site-packages/anyio/abc/_tasks.py | 101 + .../site-packages/anyio/abc/_testing.py | 65 + .../site-packages/anyio/from_thread.py | 527 + .../site-packages/anyio/lowlevel.py | 161 + 
lib/python3.11/site-packages/anyio/py.typed | 0 .../site-packages/anyio/pytest_plugin.py | 191 + .../site-packages/anyio/streams/__init__.py | 0 .../site-packages/anyio/streams/buffered.py | 119 + .../site-packages/anyio/streams/file.py | 148 + .../site-packages/anyio/streams/memory.py | 317 + .../site-packages/anyio/streams/stapled.py | 141 + .../site-packages/anyio/streams/text.py | 147 + .../site-packages/anyio/streams/tls.py | 337 + .../site-packages/anyio/to_interpreter.py | 218 + .../site-packages/anyio/to_process.py | 258 + .../site-packages/anyio/to_thread.py | 69 + .../beautifulsoup4-4.12.3.dist-info/INSTALLER | 1 + .../beautifulsoup4-4.12.3.dist-info/METADATA | 122 + .../beautifulsoup4-4.12.3.dist-info/RECORD | 79 + .../beautifulsoup4-4.12.3.dist-info/REQUESTED | 0 .../beautifulsoup4-4.12.3.dist-info/WHEEL | 4 + .../licenses/AUTHORS | 49 + .../licenses/LICENSE | 31 + lib/python3.11/site-packages/bs4/__init__.py | 840 ++ .../site-packages/bs4/builder/__init__.py | 636 ++ .../site-packages/bs4/builder/_html5lib.py | 481 + .../site-packages/bs4/builder/_htmlparser.py | 387 + .../site-packages/bs4/builder/_lxml.py | 388 + lib/python3.11/site-packages/bs4/css.py | 280 + lib/python3.11/site-packages/bs4/dammit.py | 1095 +++ lib/python3.11/site-packages/bs4/diagnose.py | 233 + lib/python3.11/site-packages/bs4/element.py | 2435 +++++ lib/python3.11/site-packages/bs4/formatter.py | 185 + .../site-packages/bs4/tests/__init__.py | 1177 +++ ...mized-bs4_fuzzer-4670634698080256.testcase | 1 + ...mized-bs4_fuzzer-4818336571064320.testcase | 1 + ...mized-bs4_fuzzer-4999465949331456.testcase | 1 + ...mized-bs4_fuzzer-5000587759190016.testcase | Bin 0 -> 15347 bytes ...mized-bs4_fuzzer-5167584867909632.testcase | Bin 0 -> 19469 bytes ...mized-bs4_fuzzer-5270998950477824.testcase | Bin 0 -> 12 bytes ...mized-bs4_fuzzer-5375146639360000.testcase | 1 + ...mized-bs4_fuzzer-5492400320282624.testcase | Bin 0 -> 11502 bytes ...mized-bs4_fuzzer-5703933063462912.testcase | 2 + 
...mized-bs4_fuzzer-5843991618256896.testcase | 1 + ...mized-bs4_fuzzer-5984173902397440.testcase | Bin 0 -> 51495 bytes ...mized-bs4_fuzzer-6124268085182464.testcase | 1 + ...mized-bs4_fuzzer-6241471367348224.testcase | 1 + ...mized-bs4_fuzzer-6306874195312640.testcase | 1 + ...mized-bs4_fuzzer-6450958476902400.testcase | Bin 0 -> 3546 bytes ...mized-bs4_fuzzer-6600557255327744.testcase | Bin 0 -> 124 bytes ...0c8ed8bcd0785b67000fcd5dea1d33f08.testcase | Bin 0 -> 2607 bytes ...a2b26f13537b68d3794b0478a4090ee4a.testcase | Bin 0 -> 103 bytes .../site-packages/bs4/tests/test_builder.py | 29 + .../bs4/tests/test_builder_registry.py | 137 + .../site-packages/bs4/tests/test_css.py | 487 + .../site-packages/bs4/tests/test_dammit.py | 370 + .../site-packages/bs4/tests/test_docs.py | 38 + .../site-packages/bs4/tests/test_element.py | 74 + .../site-packages/bs4/tests/test_formatter.py | 113 + .../site-packages/bs4/tests/test_fuzz.py | 176 + .../site-packages/bs4/tests/test_html5lib.py | 224 + .../bs4/tests/test_htmlparser.py | 148 + .../site-packages/bs4/tests/test_lxml.py | 203 + .../bs4/tests/test_navigablestring.py | 144 + .../bs4/tests/test_pageelement.py | 378 + .../site-packages/bs4/tests/test_soup.py | 504 + .../site-packages/bs4/tests/test_tag.py | 234 + .../site-packages/bs4/tests/test_tree.py | 1304 +++ .../certifi-2024.12.14.dist-info/INSTALLER | 1 + .../certifi-2024.12.14.dist-info/LICENSE | 20 + .../certifi-2024.12.14.dist-info/METADATA | 68 + .../certifi-2024.12.14.dist-info/RECORD | 14 + .../certifi-2024.12.14.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../site-packages/certifi/__init__.py | 4 + .../site-packages/certifi/__main__.py | 12 + .../site-packages/certifi/cacert.pem | 4856 +++++++++ lib/python3.11/site-packages/certifi/core.py | 114 + lib/python3.11/site-packages/certifi/py.typed | 0 .../INSTALLER | 1 + .../LICENSE | 21 + .../METADATA | 721 ++ .../charset_normalizer-3.4.1.dist-info/RECORD | 35 + .../charset_normalizer-3.4.1.dist-info/WHEEL | 5 + 
.../entry_points.txt | 2 + .../top_level.txt | 1 + .../charset_normalizer/__init__.py | 48 + .../charset_normalizer/__main__.py | 6 + .../site-packages/charset_normalizer/api.py | 668 ++ .../site-packages/charset_normalizer/cd.py | 395 + .../charset_normalizer/cli/__init__.py | 8 + .../charset_normalizer/cli/__main__.py | 321 + .../charset_normalizer/constant.py | 1998 ++++ .../charset_normalizer/legacy.py | 66 + .../site-packages/charset_normalizer/md.py | 630 ++ .../charset_normalizer/models.py | 360 + .../site-packages/charset_normalizer/py.typed | 0 .../site-packages/charset_normalizer/utils.py | 408 + .../charset_normalizer/version.py | 8 + .../site-packages/distutils-precedence.pth | 1 + lib/python3.11/site-packages/easy-install.pth | 1 + .../filetype-1.2.0.dist-info/INSTALLER | 1 + .../filetype-1.2.0.dist-info/LICENSE | 21 + .../filetype-1.2.0.dist-info/METADATA | 213 + .../filetype-1.2.0.dist-info/RECORD | 42 + .../filetype-1.2.0.dist-info/REQUESTED | 0 .../filetype-1.2.0.dist-info/WHEEL | 6 + .../filetype-1.2.0.dist-info/entry_points.txt | 3 + .../filetype-1.2.0.dist-info/top_level.txt | 1 + .../filetype-1.2.0.dist-info/zip-safe | 1 + .../site-packages/filetype/__init__.py | 10 + .../site-packages/filetype/__main__.py | 37 + .../site-packages/filetype/filetype.py | 98 + .../site-packages/filetype/helpers.py | 140 + .../site-packages/filetype/match.py | 155 + .../site-packages/filetype/types/__init__.py | 118 + .../filetype/types/application.py | 22 + .../site-packages/filetype/types/archive.py | 687 ++ .../site-packages/filetype/types/audio.py | 212 + .../site-packages/filetype/types/base.py | 29 + .../site-packages/filetype/types/document.py | 256 + .../site-packages/filetype/types/font.py | 115 + .../site-packages/filetype/types/image.py | 383 + .../site-packages/filetype/types/isobmff.py | 33 + .../site-packages/filetype/types/video.py | 223 + .../site-packages/filetype/utils.py | 84 + .../h11-0.14.0.dist-info/INSTALLER | 1 + 
.../h11-0.14.0.dist-info/LICENSE.txt | 22 + .../h11-0.14.0.dist-info/METADATA | 193 + .../site-packages/h11-0.14.0.dist-info/RECORD | 52 + .../site-packages/h11-0.14.0.dist-info/WHEEL | 5 + .../h11-0.14.0.dist-info/top_level.txt | 1 + lib/python3.11/site-packages/h11/__init__.py | 62 + lib/python3.11/site-packages/h11/_abnf.py | 132 + .../site-packages/h11/_connection.py | 633 ++ lib/python3.11/site-packages/h11/_events.py | 369 + lib/python3.11/site-packages/h11/_headers.py | 278 + lib/python3.11/site-packages/h11/_readers.py | 247 + .../site-packages/h11/_receivebuffer.py | 153 + lib/python3.11/site-packages/h11/_state.py | 367 + lib/python3.11/site-packages/h11/_util.py | 135 + lib/python3.11/site-packages/h11/_version.py | 16 + lib/python3.11/site-packages/h11/_writers.py | 145 + lib/python3.11/site-packages/h11/py.typed | 1 + .../site-packages/h11/tests/__init__.py | 0 .../site-packages/h11/tests/data/test-file | 1 + .../site-packages/h11/tests/helpers.py | 101 + .../h11/tests/test_against_stdlib_http.py | 115 + .../h11/tests/test_connection.py | 1122 +++ .../site-packages/h11/tests/test_events.py | 150 + .../site-packages/h11/tests/test_headers.py | 157 + .../site-packages/h11/tests/test_helpers.py | 32 + .../site-packages/h11/tests/test_io.py | 572 ++ .../h11/tests/test_receivebuffer.py | 135 + .../site-packages/h11/tests/test_state.py | 271 + .../site-packages/h11/tests/test_util.py | 112 + .../httpcore-1.0.7.dist-info/INSTALLER | 1 + .../httpcore-1.0.7.dist-info/METADATA | 616 ++ .../httpcore-1.0.7.dist-info/RECORD | 68 + .../httpcore-1.0.7.dist-info/WHEEL | 4 + .../licenses/LICENSE.md | 27 + .../site-packages/httpcore/__init__.py | 140 + lib/python3.11/site-packages/httpcore/_api.py | 94 + .../site-packages/httpcore/_async/__init__.py | 39 + .../httpcore/_async/connection.py | 222 + .../httpcore/_async/connection_pool.py | 420 + .../site-packages/httpcore/_async/http11.py | 379 + .../site-packages/httpcore/_async/http2.py | 583 ++ 
.../httpcore/_async/http_proxy.py | 367 + .../httpcore/_async/interfaces.py | 137 + .../httpcore/_async/socks_proxy.py | 341 + .../httpcore/_backends/__init__.py | 0 .../site-packages/httpcore/_backends/anyio.py | 146 + .../site-packages/httpcore/_backends/auto.py | 52 + .../site-packages/httpcore/_backends/base.py | 101 + .../site-packages/httpcore/_backends/mock.py | 143 + .../site-packages/httpcore/_backends/sync.py | 241 + .../site-packages/httpcore/_backends/trio.py | 159 + .../site-packages/httpcore/_exceptions.py | 81 + .../site-packages/httpcore/_models.py | 516 + lib/python3.11/site-packages/httpcore/_ssl.py | 9 + .../site-packages/httpcore/_sync/__init__.py | 39 + .../httpcore/_sync/connection.py | 222 + .../httpcore/_sync/connection_pool.py | 420 + .../site-packages/httpcore/_sync/http11.py | 379 + .../site-packages/httpcore/_sync/http2.py | 583 ++ .../httpcore/_sync/http_proxy.py | 367 + .../httpcore/_sync/interfaces.py | 137 + .../httpcore/_sync/socks_proxy.py | 341 + .../httpcore/_synchronization.py | 318 + .../site-packages/httpcore/_trace.py | 107 + .../site-packages/httpcore/_utils.py | 37 + .../site-packages/httpcore/py.typed | 0 .../httpx-0.28.1.dist-info/INSTALLER | 1 + .../httpx-0.28.1.dist-info/METADATA | 203 + .../httpx-0.28.1.dist-info/RECORD | 55 + .../httpx-0.28.1.dist-info/REQUESTED | 0 .../httpx-0.28.1.dist-info/WHEEL | 4 + .../httpx-0.28.1.dist-info/entry_points.txt | 2 + .../licenses/LICENSE.md | 12 + .../site-packages/httpx/__init__.py | 105 + .../site-packages/httpx/__version__.py | 3 + lib/python3.11/site-packages/httpx/_api.py | 438 + lib/python3.11/site-packages/httpx/_auth.py | 348 + lib/python3.11/site-packages/httpx/_client.py | 2019 ++++ lib/python3.11/site-packages/httpx/_config.py | 248 + .../site-packages/httpx/_content.py | 240 + .../site-packages/httpx/_decoders.py | 393 + .../site-packages/httpx/_exceptions.py | 379 + lib/python3.11/site-packages/httpx/_main.py | 506 + lib/python3.11/site-packages/httpx/_models.py | 1277 
+++ .../site-packages/httpx/_multipart.py | 300 + .../site-packages/httpx/_status_codes.py | 162 + .../httpx/_transports/__init__.py | 15 + .../site-packages/httpx/_transports/asgi.py | 187 + .../site-packages/httpx/_transports/base.py | 86 + .../httpx/_transports/default.py | 406 + .../site-packages/httpx/_transports/mock.py | 43 + .../site-packages/httpx/_transports/wsgi.py | 149 + lib/python3.11/site-packages/httpx/_types.py | 114 + .../site-packages/httpx/_urlparse.py | 527 + lib/python3.11/site-packages/httpx/_urls.py | 641 ++ lib/python3.11/site-packages/httpx/_utils.py | 242 + lib/python3.11/site-packages/httpx/py.typed | 0 .../idna-3.10.dist-info/INSTALLER | 1 + .../idna-3.10.dist-info/LICENSE.md | 31 + .../idna-3.10.dist-info/METADATA | 250 + .../site-packages/idna-3.10.dist-info/RECORD | 22 + .../site-packages/idna-3.10.dist-info/WHEEL | 4 + lib/python3.11/site-packages/idna/__init__.py | 45 + lib/python3.11/site-packages/idna/codec.py | 122 + lib/python3.11/site-packages/idna/compat.py | 15 + lib/python3.11/site-packages/idna/core.py | 437 + lib/python3.11/site-packages/idna/idnadata.py | 4243 ++++++++ .../site-packages/idna/intranges.py | 57 + .../site-packages/idna/package_data.py | 1 + lib/python3.11/site-packages/idna/py.typed | 0 .../site-packages/idna/uts46data.py | 8681 +++++++++++++++++ .../lxml-5.3.0.dist-info/INSTALLER | 1 + .../lxml-5.3.0.dist-info/LICENSE.txt | 29 + .../lxml-5.3.0.dist-info/LICENSES.txt | 29 + .../lxml-5.3.0.dist-info/METADATA | 103 + .../site-packages/lxml-5.3.0.dist-info/RECORD | 202 + .../lxml-5.3.0.dist-info/REQUESTED | 0 .../site-packages/lxml-5.3.0.dist-info/WHEEL | 5 + .../lxml-5.3.0.dist-info/top_level.txt | 1 + .../site-packages/lxml/ElementInclude.py | 244 + lib/python3.11/site-packages/lxml/__init__.py | 22 + .../site-packages/lxml/_elementpath.py | 341 + .../site-packages/lxml/apihelpers.pxi | 1793 ++++ lib/python3.11/site-packages/lxml/builder.py | 232 + .../site-packages/lxml/classlookup.pxi | 580 ++ 
lib/python3.11/site-packages/lxml/cleanup.pxi | 215 + .../site-packages/lxml/cssselect.py | 101 + lib/python3.11/site-packages/lxml/debug.pxi | 90 + .../site-packages/lxml/docloader.pxi | 178 + .../site-packages/lxml/doctestcompare.py | 488 + lib/python3.11/site-packages/lxml/dtd.pxi | 478 + lib/python3.11/site-packages/lxml/etree.h | 248 + lib/python3.11/site-packages/lxml/etree.pyx | 3731 +++++++ lib/python3.11/site-packages/lxml/etree_api.h | 195 + .../site-packages/lxml/extensions.pxi | 833 ++ .../site-packages/lxml/html/ElementSoup.py | 10 + .../site-packages/lxml/html/__init__.py | 1923 ++++ .../site-packages/lxml/html/_diffcommand.py | 86 + .../site-packages/lxml/html/_html5builder.py | 100 + .../site-packages/lxml/html/_setmixin.py | 56 + .../site-packages/lxml/html/builder.py | 133 + .../site-packages/lxml/html/clean.py | 21 + .../site-packages/lxml/html/defs.py | 135 + .../site-packages/lxml/html/diff.py | 878 ++ .../site-packages/lxml/html/formfill.py | 299 + .../site-packages/lxml/html/html5parser.py | 260 + .../site-packages/lxml/html/soupparser.py | 314 + .../site-packages/lxml/html/usedoctest.py | 13 + .../site-packages/lxml/includes/__init__.pxd | 0 .../site-packages/lxml/includes/__init__.py | 0 .../site-packages/lxml/includes/c14n.pxd | 25 + .../site-packages/lxml/includes/config.pxd | 3 + .../site-packages/lxml/includes/dtdvalid.pxd | 18 + .../site-packages/lxml/includes/etree_defs.h | 379 + .../lxml/includes/etreepublic.pxd | 237 + .../lxml/includes/extlibs/__init__.py | 0 .../lxml/includes/extlibs/libcharset.h | 45 + .../lxml/includes/extlibs/localcharset.h | 137 + .../lxml/includes/extlibs/zconf.h | 543 ++ .../lxml/includes/extlibs/zlib.h | 1938 ++++ .../lxml/includes/htmlparser.pxd | 56 + .../lxml/includes/libexslt/__init__.py | 0 .../lxml/includes/libexslt/exslt.h | 108 + .../lxml/includes/libexslt/exsltconfig.h | 70 + .../lxml/includes/libexslt/exsltexports.h | 63 + .../lxml/includes/libxml/HTMLparser.h | 343 + 
.../lxml/includes/libxml/HTMLtree.h | 147 + .../site-packages/lxml/includes/libxml/SAX.h | 202 + .../site-packages/lxml/includes/libxml/SAX2.h | 171 + .../lxml/includes/libxml/__init__.py | 0 .../site-packages/lxml/includes/libxml/c14n.h | 126 + .../lxml/includes/libxml/catalog.h | 182 + .../lxml/includes/libxml/chvalid.h | 230 + .../lxml/includes/libxml/debugXML.h | 217 + .../site-packages/lxml/includes/libxml/dict.h | 82 + .../lxml/includes/libxml/encoding.h | 235 + .../lxml/includes/libxml/entities.h | 155 + .../lxml/includes/libxml/globals.h | 41 + .../site-packages/lxml/includes/libxml/hash.h | 232 + .../site-packages/lxml/includes/libxml/list.h | 137 + .../lxml/includes/libxml/nanoftp.h | 186 + .../lxml/includes/libxml/nanohttp.h | 81 + .../lxml/includes/libxml/parser.h | 1384 +++ .../lxml/includes/libxml/parserInternals.h | 663 ++ .../lxml/includes/libxml/relaxng.h | 219 + .../lxml/includes/libxml/schemasInternals.h | 959 ++ .../lxml/includes/libxml/schematron.h | 143 + .../lxml/includes/libxml/threads.h | 87 + .../site-packages/lxml/includes/libxml/tree.h | 1362 +++ .../site-packages/lxml/includes/libxml/uri.h | 95 + .../lxml/includes/libxml/valid.h | 450 + .../lxml/includes/libxml/xinclude.h | 129 + .../lxml/includes/libxml/xlink.h | 189 + .../lxml/includes/libxml/xmlIO.h | 421 + .../lxml/includes/libxml/xmlautomata.h | 146 + .../lxml/includes/libxml/xmlerror.h | 948 ++ .../lxml/includes/libxml/xmlexports.h | 50 + .../lxml/includes/libxml/xmlmemory.h | 225 + .../lxml/includes/libxml/xmlmodule.h | 57 + .../lxml/includes/libxml/xmlreader.h | 434 + .../lxml/includes/libxml/xmlregexp.h | 215 + .../lxml/includes/libxml/xmlsave.h | 97 + .../lxml/includes/libxml/xmlschemas.h | 249 + .../lxml/includes/libxml/xmlschemastypes.h | 152 + .../lxml/includes/libxml/xmlstring.h | 140 + .../lxml/includes/libxml/xmlunicode.h | 202 + .../lxml/includes/libxml/xmlversion.h | 511 + .../lxml/includes/libxml/xmlwriter.h | 488 + .../lxml/includes/libxml/xpath.h | 575 ++ 
.../lxml/includes/libxml/xpathInternals.h | 633 ++ .../lxml/includes/libxml/xpointer.h | 138 + .../lxml/includes/libxslt/__init__.py | 0 .../lxml/includes/libxslt/attributes.h | 39 + .../lxml/includes/libxslt/documents.h | 93 + .../lxml/includes/libxslt/extensions.h | 262 + .../lxml/includes/libxslt/extra.h | 72 + .../lxml/includes/libxslt/functions.h | 78 + .../lxml/includes/libxslt/imports.h | 75 + .../lxml/includes/libxslt/keys.h | 53 + .../lxml/includes/libxslt/namespaces.h | 68 + .../lxml/includes/libxslt/numbersInternals.h | 73 + .../lxml/includes/libxslt/pattern.h | 84 + .../lxml/includes/libxslt/preproc.h | 43 + .../lxml/includes/libxslt/security.h | 104 + .../lxml/includes/libxslt/templates.h | 77 + .../lxml/includes/libxslt/transform.h | 207 + .../lxml/includes/libxslt/variables.h | 118 + .../lxml/includes/libxslt/xslt.h | 110 + .../lxml/includes/libxslt/xsltInternals.h | 1995 ++++ .../lxml/includes/libxslt/xsltconfig.h | 146 + .../lxml/includes/libxslt/xsltexports.h | 64 + .../lxml/includes/libxslt/xsltlocale.h | 44 + .../lxml/includes/libxslt/xsltutils.h | 343 + .../lxml/includes/lxml-version.h | 3 + .../site-packages/lxml/includes/relaxng.pxd | 64 + .../lxml/includes/schematron.pxd | 34 + .../site-packages/lxml/includes/tree.pxd | 494 + .../site-packages/lxml/includes/uri.pxd | 5 + .../site-packages/lxml/includes/xinclude.pxd | 22 + .../site-packages/lxml/includes/xmlerror.pxd | 852 ++ .../site-packages/lxml/includes/xmlparser.pxd | 265 + .../site-packages/lxml/includes/xmlschema.pxd | 35 + .../site-packages/lxml/includes/xpath.pxd | 136 + .../site-packages/lxml/includes/xslt.pxd | 190 + .../lxml/isoschematron/__init__.py | 348 + .../resources/rng/iso-schematron.rng | 709 ++ .../resources/xsl/RNG2Schtrn.xsl | 75 + .../resources/xsl/XSD2Schtrn.xsl | 77 + .../iso_abstract_expand.xsl | 313 + .../iso-schematron-xslt1/iso_dsdl_include.xsl | 1160 +++ .../iso_schematron_message.xsl | 55 + .../iso_schematron_skeleton_for_xslt1.xsl | 1796 ++++ 
.../iso_svrl_for_xslt1.xsl | 588 ++ .../xsl/iso-schematron-xslt1/readme.txt | 84 + .../site-packages/lxml/iterparse.pxi | 438 + .../site-packages/lxml/lxml.etree.h | 248 + .../site-packages/lxml/lxml.etree_api.h | 195 + .../site-packages/lxml/nsclasses.pxi | 281 + .../site-packages/lxml/objectify.pyx | 2145 ++++ .../site-packages/lxml/objectpath.pxi | 332 + lib/python3.11/site-packages/lxml/parser.pxi | 2000 ++++ .../site-packages/lxml/parsertarget.pxi | 180 + lib/python3.11/site-packages/lxml/proxy.pxi | 619 ++ .../site-packages/lxml/public-api.pxi | 178 + .../site-packages/lxml/pyclasslookup.py | 3 + .../site-packages/lxml/readonlytree.pxi | 565 ++ lib/python3.11/site-packages/lxml/relaxng.pxi | 165 + lib/python3.11/site-packages/lxml/sax.py | 275 + .../site-packages/lxml/saxparser.pxi | 875 ++ .../site-packages/lxml/schematron.pxi | 168 + .../site-packages/lxml/serializer.pxi | 1781 ++++ .../site-packages/lxml/usedoctest.py | 13 + .../site-packages/lxml/xinclude.pxi | 67 + .../site-packages/lxml/xmlerror.pxi | 1654 ++++ lib/python3.11/site-packages/lxml/xmlid.pxi | 179 + .../site-packages/lxml/xmlschema.pxi | 215 + lib/python3.11/site-packages/lxml/xpath.pxi | 487 + lib/python3.11/site-packages/lxml/xslt.pxi | 950 ++ lib/python3.11/site-packages/lxml/xsltext.pxi | 242 + .../pip-24.3.1.dist-info/AUTHORS.txt | 799 ++ .../pip-24.3.1.dist-info/INSTALLER | 1 + .../pip-24.3.1.dist-info/LICENSE.txt | 20 + .../pip-24.3.1.dist-info/METADATA | 90 + .../site-packages/pip-24.3.1.dist-info/RECORD | 905 ++ .../site-packages/pip-24.3.1.dist-info/WHEEL | 5 + .../pip-24.3.1.dist-info/entry_points.txt | 3 + .../pip-24.3.1.dist-info/top_level.txt | 1 + .../site-packages/pip-24.3.1.virtualenv | 0 lib/python3.11/site-packages/pip/__init__.py | 13 + lib/python3.11/site-packages/pip/__main__.py | 24 + .../site-packages/pip/__pip-runner__.py | 50 + .../site-packages/pip/_internal/__init__.py | 18 + .../site-packages/pip/_internal/build_env.py | 319 + 
.../site-packages/pip/_internal/cache.py | 290 + .../pip/_internal/cli/__init__.py | 4 + .../pip/_internal/cli/autocompletion.py | 176 + .../pip/_internal/cli/base_command.py | 231 + .../pip/_internal/cli/cmdoptions.py | 1075 ++ .../pip/_internal/cli/command_context.py | 27 + .../pip/_internal/cli/index_command.py | 170 + .../site-packages/pip/_internal/cli/main.py | 80 + .../pip/_internal/cli/main_parser.py | 134 + .../site-packages/pip/_internal/cli/parser.py | 294 + .../pip/_internal/cli/progress_bars.py | 94 + .../pip/_internal/cli/req_command.py | 329 + .../pip/_internal/cli/spinners.py | 159 + .../pip/_internal/cli/status_codes.py | 6 + .../pip/_internal/commands/__init__.py | 132 + .../pip/_internal/commands/cache.py | 225 + .../pip/_internal/commands/check.py | 67 + .../pip/_internal/commands/completion.py | 130 + .../pip/_internal/commands/configuration.py | 280 + .../pip/_internal/commands/debug.py | 201 + .../pip/_internal/commands/download.py | 146 + .../pip/_internal/commands/freeze.py | 109 + .../pip/_internal/commands/hash.py | 59 + .../pip/_internal/commands/help.py | 41 + .../pip/_internal/commands/index.py | 139 + .../pip/_internal/commands/inspect.py | 92 + .../pip/_internal/commands/install.py | 783 ++ .../pip/_internal/commands/list.py | 375 + .../pip/_internal/commands/search.py | 172 + .../pip/_internal/commands/show.py | 217 + .../pip/_internal/commands/uninstall.py | 114 + .../pip/_internal/commands/wheel.py | 182 + .../pip/_internal/configuration.py | 383 + .../pip/_internal/distributions/__init__.py | 21 + .../pip/_internal/distributions/base.py | 53 + .../pip/_internal/distributions/installed.py | 29 + .../pip/_internal/distributions/sdist.py | 158 + .../pip/_internal/distributions/wheel.py | 42 + .../site-packages/pip/_internal/exceptions.py | 809 ++ .../pip/_internal/index/__init__.py | 2 + .../pip/_internal/index/collector.py | 494 + .../pip/_internal/index/package_finder.py | 1020 ++ .../pip/_internal/index/sources.py | 284 + 
.../pip/_internal/locations/__init__.py | 456 + .../pip/_internal/locations/_distutils.py | 172 + .../pip/_internal/locations/_sysconfig.py | 214 + .../pip/_internal/locations/base.py | 81 + .../site-packages/pip/_internal/main.py | 12 + .../pip/_internal/metadata/__init__.py | 128 + .../pip/_internal/metadata/_json.py | 84 + .../pip/_internal/metadata/base.py | 688 ++ .../_internal/metadata/importlib/__init__.py | 6 + .../_internal/metadata/importlib/_compat.py | 85 + .../_internal/metadata/importlib/_dists.py | 221 + .../pip/_internal/metadata/importlib/_envs.py | 189 + .../pip/_internal/metadata/pkg_resources.py | 301 + .../pip/_internal/models/__init__.py | 2 + .../pip/_internal/models/candidate.py | 25 + .../pip/_internal/models/direct_url.py | 224 + .../pip/_internal/models/format_control.py | 78 + .../pip/_internal/models/index.py | 28 + .../_internal/models/installation_report.py | 56 + .../pip/_internal/models/link.py | 590 ++ .../pip/_internal/models/scheme.py | 25 + .../pip/_internal/models/search_scope.py | 127 + .../pip/_internal/models/selection_prefs.py | 53 + .../pip/_internal/models/target_python.py | 121 + .../pip/_internal/models/wheel.py | 118 + .../pip/_internal/network/__init__.py | 2 + .../pip/_internal/network/auth.py | 566 ++ .../pip/_internal/network/cache.py | 106 + .../pip/_internal/network/download.py | 187 + .../pip/_internal/network/lazy_wheel.py | 210 + .../pip/_internal/network/session.py | 522 + .../pip/_internal/network/utils.py | 98 + .../pip/_internal/network/xmlrpc.py | 62 + .../pip/_internal/operations/__init__.py | 0 .../_internal/operations/build/__init__.py | 0 .../operations/build/build_tracker.py | 138 + .../_internal/operations/build/metadata.py | 39 + .../operations/build/metadata_editable.py | 41 + .../operations/build/metadata_legacy.py | 74 + .../pip/_internal/operations/build/wheel.py | 37 + .../operations/build/wheel_editable.py | 46 + .../operations/build/wheel_legacy.py | 102 + 
.../pip/_internal/operations/check.py | 181 + .../pip/_internal/operations/freeze.py | 258 + .../_internal/operations/install/__init__.py | 2 + .../operations/install/editable_legacy.py | 47 + .../pip/_internal/operations/install/wheel.py | 741 ++ .../pip/_internal/operations/prepare.py | 732 ++ .../site-packages/pip/_internal/pyproject.py | 185 + .../pip/_internal/req/__init__.py | 90 + .../pip/_internal/req/constructors.py | 560 ++ .../pip/_internal/req/req_file.py | 574 ++ .../pip/_internal/req/req_install.py | 934 ++ .../pip/_internal/req/req_set.py | 82 + .../pip/_internal/req/req_uninstall.py | 633 ++ .../pip/_internal/resolution/__init__.py | 0 .../pip/_internal/resolution/base.py | 20 + .../_internal/resolution/legacy/__init__.py | 0 .../_internal/resolution/legacy/resolver.py | 597 ++ .../resolution/resolvelib/__init__.py | 0 .../_internal/resolution/resolvelib/base.py | 139 + .../resolution/resolvelib/candidates.py | 574 ++ .../resolution/resolvelib/factory.py | 823 ++ .../resolution/resolvelib/found_candidates.py | 174 + .../resolution/resolvelib/provider.py | 258 + .../resolution/resolvelib/reporter.py | 81 + .../resolution/resolvelib/requirements.py | 245 + .../resolution/resolvelib/resolver.py | 317 + .../pip/_internal/self_outdated_check.py | 244 + .../pip/_internal/utils/__init__.py | 0 .../pip/_internal/utils/_jaraco_text.py | 109 + .../site-packages/pip/_internal/utils/_log.py | 38 + .../pip/_internal/utils/appdirs.py | 52 + .../pip/_internal/utils/compat.py | 79 + .../pip/_internal/utils/compatibility_tags.py | 188 + .../pip/_internal/utils/datetime.py | 11 + .../pip/_internal/utils/deprecation.py | 124 + .../pip/_internal/utils/direct_url_helpers.py | 87 + .../pip/_internal/utils/egg_link.py | 80 + .../pip/_internal/utils/encoding.py | 36 + .../pip/_internal/utils/entrypoints.py | 84 + .../pip/_internal/utils/filesystem.py | 149 + .../pip/_internal/utils/filetypes.py | 27 + .../pip/_internal/utils/glibc.py | 101 + 
.../pip/_internal/utils/hashes.py | 147 + .../pip/_internal/utils/logging.py | 347 + .../site-packages/pip/_internal/utils/misc.py | 772 ++ .../pip/_internal/utils/packaging.py | 57 + .../pip/_internal/utils/retry.py | 42 + .../pip/_internal/utils/setuptools_build.py | 146 + .../pip/_internal/utils/subprocess.py | 245 + .../pip/_internal/utils/temp_dir.py | 296 + .../pip/_internal/utils/unpacking.py | 337 + .../site-packages/pip/_internal/utils/urls.py | 55 + .../pip/_internal/utils/virtualenv.py | 104 + .../pip/_internal/utils/wheel.py | 134 + .../pip/_internal/vcs/__init__.py | 15 + .../site-packages/pip/_internal/vcs/bazaar.py | 112 + .../site-packages/pip/_internal/vcs/git.py | 527 + .../pip/_internal/vcs/mercurial.py | 163 + .../pip/_internal/vcs/subversion.py | 324 + .../pip/_internal/vcs/versioncontrol.py | 688 ++ .../pip/_internal/wheel_builder.py | 354 + .../site-packages/pip/_vendor/__init__.py | 116 + .../pip/_vendor/cachecontrol/__init__.py | 28 + .../pip/_vendor/cachecontrol/_cmd.py | 70 + .../pip/_vendor/cachecontrol/adapter.py | 161 + .../pip/_vendor/cachecontrol/cache.py | 74 + .../_vendor/cachecontrol/caches/__init__.py | 8 + .../_vendor/cachecontrol/caches/file_cache.py | 182 + .../cachecontrol/caches/redis_cache.py | 48 + .../pip/_vendor/cachecontrol/controller.py | 499 + .../pip/_vendor/cachecontrol/filewrapper.py | 119 + .../pip/_vendor/cachecontrol/heuristics.py | 154 + .../pip/_vendor/cachecontrol/py.typed | 0 .../pip/_vendor/cachecontrol/serialize.py | 146 + .../pip/_vendor/cachecontrol/wrapper.py | 43 + .../pip/_vendor/certifi/__init__.py | 4 + .../pip/_vendor/certifi/__main__.py | 12 + .../pip/_vendor/certifi/cacert.pem | 4929 ++++++++++ .../site-packages/pip/_vendor/certifi/core.py | 114 + .../pip/_vendor/certifi/py.typed | 0 .../pip/_vendor/distlib/__init__.py | 33 + .../pip/_vendor/distlib/compat.py | 1137 +++ .../pip/_vendor/distlib/database.py | 1329 +++ .../pip/_vendor/distlib/index.py | 508 + .../pip/_vendor/distlib/locators.py | 
1295 +++ .../pip/_vendor/distlib/manifest.py | 384 + .../pip/_vendor/distlib/markers.py | 162 + .../pip/_vendor/distlib/metadata.py | 1031 ++ .../pip/_vendor/distlib/resources.py | 358 + .../pip/_vendor/distlib/scripts.py | 447 + .../site-packages/pip/_vendor/distlib/util.py | 1984 ++++ .../pip/_vendor/distlib/version.py | 750 ++ .../pip/_vendor/distlib/wheel.py | 1100 +++ .../pip/_vendor/distro/__init__.py | 54 + .../pip/_vendor/distro/__main__.py | 4 + .../pip/_vendor/distro/distro.py | 1403 +++ .../site-packages/pip/_vendor/distro/py.typed | 0 .../pip/_vendor/idna/__init__.py | 44 + .../site-packages/pip/_vendor/idna/codec.py | 118 + .../site-packages/pip/_vendor/idna/compat.py | 13 + .../site-packages/pip/_vendor/idna/core.py | 395 + .../pip/_vendor/idna/idnadata.py | 4245 ++++++++ .../pip/_vendor/idna/intranges.py | 54 + .../pip/_vendor/idna/package_data.py | 2 + .../site-packages/pip/_vendor/idna/py.typed | 0 .../pip/_vendor/idna/uts46data.py | 8598 ++++++++++++++++ .../pip/_vendor/msgpack/__init__.py | 55 + .../pip/_vendor/msgpack/exceptions.py | 48 + .../site-packages/pip/_vendor/msgpack/ext.py | 168 + .../pip/_vendor/msgpack/fallback.py | 951 ++ .../pip/_vendor/packaging/__init__.py | 15 + .../pip/_vendor/packaging/_elffile.py | 110 + .../pip/_vendor/packaging/_manylinux.py | 262 + .../pip/_vendor/packaging/_musllinux.py | 85 + .../pip/_vendor/packaging/_parser.py | 354 + .../pip/_vendor/packaging/_structures.py | 61 + .../pip/_vendor/packaging/_tokenizer.py | 194 + .../pip/_vendor/packaging/markers.py | 325 + .../pip/_vendor/packaging/metadata.py | 804 ++ .../pip/_vendor/packaging/py.typed | 0 .../pip/_vendor/packaging/requirements.py | 91 + .../pip/_vendor/packaging/specifiers.py | 1009 ++ .../pip/_vendor/packaging/tags.py | 627 ++ .../pip/_vendor/packaging/utils.py | 174 + .../pip/_vendor/packaging/version.py | 563 ++ .../pip/_vendor/pkg_resources/__init__.py | 3676 +++++++ .../pip/_vendor/platformdirs/__init__.py | 627 ++ 
.../pip/_vendor/platformdirs/__main__.py | 55 + .../pip/_vendor/platformdirs/android.py | 249 + .../pip/_vendor/platformdirs/api.py | 292 + .../pip/_vendor/platformdirs/macos.py | 130 + .../pip/_vendor/platformdirs/py.typed | 0 .../pip/_vendor/platformdirs/unix.py | 275 + .../pip/_vendor/platformdirs/version.py | 16 + .../pip/_vendor/platformdirs/windows.py | 272 + .../pip/_vendor/pygments/__init__.py | 82 + .../pip/_vendor/pygments/__main__.py | 17 + .../pip/_vendor/pygments/cmdline.py | 668 ++ .../pip/_vendor/pygments/console.py | 70 + .../pip/_vendor/pygments/filter.py | 70 + .../pip/_vendor/pygments/filters/__init__.py | 940 ++ .../pip/_vendor/pygments/formatter.py | 129 + .../_vendor/pygments/formatters/__init__.py | 157 + .../_vendor/pygments/formatters/_mapping.py | 23 + .../pip/_vendor/pygments/formatters/bbcode.py | 108 + .../pip/_vendor/pygments/formatters/groff.py | 170 + .../pip/_vendor/pygments/formatters/html.py | 987 ++ .../pip/_vendor/pygments/formatters/img.py | 685 ++ .../pip/_vendor/pygments/formatters/irc.py | 154 + .../pip/_vendor/pygments/formatters/latex.py | 518 + .../pip/_vendor/pygments/formatters/other.py | 160 + .../pygments/formatters/pangomarkup.py | 83 + .../pip/_vendor/pygments/formatters/rtf.py | 349 + .../pip/_vendor/pygments/formatters/svg.py | 185 + .../_vendor/pygments/formatters/terminal.py | 127 + .../pygments/formatters/terminal256.py | 338 + .../pip/_vendor/pygments/lexer.py | 963 ++ .../pip/_vendor/pygments/lexers/__init__.py | 362 + .../pip/_vendor/pygments/lexers/_mapping.py | 589 ++ .../pip/_vendor/pygments/lexers/python.py | 1198 +++ .../pip/_vendor/pygments/modeline.py | 43 + .../pip/_vendor/pygments/plugin.py | 72 + .../pip/_vendor/pygments/regexopt.py | 91 + .../pip/_vendor/pygments/scanner.py | 104 + .../pip/_vendor/pygments/sphinxext.py | 247 + .../pip/_vendor/pygments/style.py | 203 + .../pip/_vendor/pygments/styles/__init__.py | 61 + .../pip/_vendor/pygments/styles/_mapping.py | 54 + 
.../pip/_vendor/pygments/token.py | 214 + .../pip/_vendor/pygments/unistring.py | 153 + .../pip/_vendor/pygments/util.py | 324 + .../pip/_vendor/pyproject_hooks/__init__.py | 23 + .../pip/_vendor/pyproject_hooks/_compat.py | 8 + .../pip/_vendor/pyproject_hooks/_impl.py | 330 + .../pyproject_hooks/_in_process/__init__.py | 18 + .../_in_process/_in_process.py | 353 + .../pip/_vendor/requests/__init__.py | 179 + .../pip/_vendor/requests/__version__.py | 14 + .../pip/_vendor/requests/_internal_utils.py | 50 + .../pip/_vendor/requests/adapters.py | 719 ++ .../site-packages/pip/_vendor/requests/api.py | 157 + .../pip/_vendor/requests/auth.py | 314 + .../pip/_vendor/requests/certs.py | 24 + .../pip/_vendor/requests/compat.py | 78 + .../pip/_vendor/requests/cookies.py | 561 ++ .../pip/_vendor/requests/exceptions.py | 151 + .../pip/_vendor/requests/help.py | 127 + .../pip/_vendor/requests/hooks.py | 33 + .../pip/_vendor/requests/models.py | 1037 ++ .../pip/_vendor/requests/packages.py | 25 + .../pip/_vendor/requests/sessions.py | 831 ++ .../pip/_vendor/requests/status_codes.py | 128 + .../pip/_vendor/requests/structures.py | 99 + .../pip/_vendor/requests/utils.py | 1096 +++ .../pip/_vendor/resolvelib/__init__.py | 26 + .../pip/_vendor/resolvelib/compat/__init__.py | 0 .../resolvelib/compat/collections_abc.py | 6 + .../pip/_vendor/resolvelib/providers.py | 133 + .../pip/_vendor/resolvelib/py.typed | 0 .../pip/_vendor/resolvelib/reporters.py | 43 + .../pip/_vendor/resolvelib/resolvers.py | 547 ++ .../pip/_vendor/resolvelib/structs.py | 170 + .../pip/_vendor/rich/__init__.py | 177 + .../pip/_vendor/rich/__main__.py | 273 + .../pip/_vendor/rich/_cell_widths.py | 454 + .../pip/_vendor/rich/_emoji_codes.py | 3610 +++++++ .../pip/_vendor/rich/_emoji_replace.py | 32 + .../pip/_vendor/rich/_export_format.py | 76 + .../pip/_vendor/rich/_extension.py | 10 + .../site-packages/pip/_vendor/rich/_fileno.py | 24 + .../pip/_vendor/rich/_inspect.py | 270 + .../pip/_vendor/rich/_log_render.py 
| 94 + .../site-packages/pip/_vendor/rich/_loop.py | 43 + .../pip/_vendor/rich/_null_file.py | 69 + .../pip/_vendor/rich/_palettes.py | 309 + .../site-packages/pip/_vendor/rich/_pick.py | 17 + .../site-packages/pip/_vendor/rich/_ratio.py | 159 + .../pip/_vendor/rich/_spinners.py | 482 + .../site-packages/pip/_vendor/rich/_stack.py | 16 + .../site-packages/pip/_vendor/rich/_timer.py | 19 + .../pip/_vendor/rich/_win32_console.py | 662 ++ .../pip/_vendor/rich/_windows.py | 71 + .../pip/_vendor/rich/_windows_renderer.py | 56 + .../site-packages/pip/_vendor/rich/_wrap.py | 93 + .../site-packages/pip/_vendor/rich/abc.py | 33 + .../site-packages/pip/_vendor/rich/align.py | 311 + .../site-packages/pip/_vendor/rich/ansi.py | 240 + .../site-packages/pip/_vendor/rich/bar.py | 93 + .../site-packages/pip/_vendor/rich/box.py | 480 + .../site-packages/pip/_vendor/rich/cells.py | 167 + .../site-packages/pip/_vendor/rich/color.py | 621 ++ .../pip/_vendor/rich/color_triplet.py | 38 + .../site-packages/pip/_vendor/rich/columns.py | 187 + .../site-packages/pip/_vendor/rich/console.py | 2633 +++++ .../pip/_vendor/rich/constrain.py | 37 + .../pip/_vendor/rich/containers.py | 167 + .../site-packages/pip/_vendor/rich/control.py | 225 + .../pip/_vendor/rich/default_styles.py | 190 + .../pip/_vendor/rich/diagnose.py | 37 + .../site-packages/pip/_vendor/rich/emoji.py | 96 + .../site-packages/pip/_vendor/rich/errors.py | 34 + .../pip/_vendor/rich/file_proxy.py | 57 + .../pip/_vendor/rich/filesize.py | 89 + .../pip/_vendor/rich/highlighter.py | 232 + .../site-packages/pip/_vendor/rich/json.py | 139 + .../site-packages/pip/_vendor/rich/jupyter.py | 101 + .../site-packages/pip/_vendor/rich/layout.py | 442 + .../site-packages/pip/_vendor/rich/live.py | 375 + .../pip/_vendor/rich/live_render.py | 112 + .../site-packages/pip/_vendor/rich/logging.py | 289 + .../site-packages/pip/_vendor/rich/markup.py | 251 + .../site-packages/pip/_vendor/rich/measure.py | 151 + 
.../site-packages/pip/_vendor/rich/padding.py | 141 + .../site-packages/pip/_vendor/rich/pager.py | 34 + .../site-packages/pip/_vendor/rich/palette.py | 100 + .../site-packages/pip/_vendor/rich/panel.py | 312 + .../site-packages/pip/_vendor/rich/pretty.py | 995 ++ .../pip/_vendor/rich/progress.py | 1699 ++++ .../pip/_vendor/rich/progress_bar.py | 223 + .../site-packages/pip/_vendor/rich/prompt.py | 375 + .../pip/_vendor/rich/protocol.py | 42 + .../site-packages/pip/_vendor/rich/py.typed | 0 .../site-packages/pip/_vendor/rich/region.py | 10 + .../site-packages/pip/_vendor/rich/repr.py | 149 + .../site-packages/pip/_vendor/rich/rule.py | 130 + .../site-packages/pip/_vendor/rich/scope.py | 86 + .../site-packages/pip/_vendor/rich/screen.py | 54 + .../site-packages/pip/_vendor/rich/segment.py | 738 ++ .../site-packages/pip/_vendor/rich/spinner.py | 137 + .../site-packages/pip/_vendor/rich/status.py | 131 + .../site-packages/pip/_vendor/rich/style.py | 796 ++ .../site-packages/pip/_vendor/rich/styled.py | 42 + .../site-packages/pip/_vendor/rich/syntax.py | 958 ++ .../site-packages/pip/_vendor/rich/table.py | 1000 ++ .../pip/_vendor/rich/terminal_theme.py | 153 + .../site-packages/pip/_vendor/rich/text.py | 1357 +++ .../site-packages/pip/_vendor/rich/theme.py | 115 + .../site-packages/pip/_vendor/rich/themes.py | 5 + .../pip/_vendor/rich/traceback.py | 753 ++ .../site-packages/pip/_vendor/rich/tree.py | 249 + .../pip/_vendor/tomli/__init__.py | 11 + .../pip/_vendor/tomli/_parser.py | 691 ++ .../site-packages/pip/_vendor/tomli/_re.py | 107 + .../site-packages/pip/_vendor/tomli/_types.py | 10 + .../site-packages/pip/_vendor/tomli/py.typed | 1 + .../pip/_vendor/truststore/__init__.py | 36 + .../pip/_vendor/truststore/_api.py | 316 + .../pip/_vendor/truststore/_macos.py | 571 ++ .../pip/_vendor/truststore/_openssl.py | 66 + .../pip/_vendor/truststore/_ssl_constants.py | 31 + .../pip/_vendor/truststore/_windows.py | 567 ++ .../pip/_vendor/truststore/py.typed | 0 
.../pip/_vendor/typing_extensions.py | 3641 +++++++ .../pip/_vendor/urllib3/__init__.py | 102 + .../pip/_vendor/urllib3/_collections.py | 355 + .../pip/_vendor/urllib3/_version.py | 2 + .../pip/_vendor/urllib3/connection.py | 572 ++ .../pip/_vendor/urllib3/connectionpool.py | 1140 +++ .../pip/_vendor/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/_appengine_environ.py | 36 + .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 519 + .../contrib/_securetransport/low_level.py | 397 + .../pip/_vendor/urllib3/contrib/appengine.py | 314 + .../pip/_vendor/urllib3/contrib/ntlmpool.py | 130 + .../pip/_vendor/urllib3/contrib/pyopenssl.py | 518 + .../urllib3/contrib/securetransport.py | 920 ++ .../pip/_vendor/urllib3/contrib/socks.py | 216 + .../pip/_vendor/urllib3/exceptions.py | 323 + .../pip/_vendor/urllib3/fields.py | 274 + .../pip/_vendor/urllib3/filepost.py | 98 + .../pip/_vendor/urllib3/packages/__init__.py | 0 .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 51 + .../packages/backports/weakref_finalize.py | 155 + .../pip/_vendor/urllib3/packages/six.py | 1076 ++ .../pip/_vendor/urllib3/poolmanager.py | 540 + .../pip/_vendor/urllib3/request.py | 191 + .../pip/_vendor/urllib3/response.py | 879 ++ .../pip/_vendor/urllib3/util/__init__.py | 49 + .../pip/_vendor/urllib3/util/connection.py | 149 + .../pip/_vendor/urllib3/util/proxy.py | 57 + .../pip/_vendor/urllib3/util/queue.py | 22 + .../pip/_vendor/urllib3/util/request.py | 137 + .../pip/_vendor/urllib3/util/response.py | 107 + .../pip/_vendor/urllib3/util/retry.py | 622 ++ .../pip/_vendor/urllib3/util/ssl_.py | 504 + .../urllib3/util/ssl_match_hostname.py | 159 + .../pip/_vendor/urllib3/util/ssltransport.py | 221 + .../pip/_vendor/urllib3/util/timeout.py | 271 + .../pip/_vendor/urllib3/util/url.py | 435 + .../pip/_vendor/urllib3/util/wait.py | 152 + .../site-packages/pip/_vendor/vendor.txt | 18 + lib/python3.11/site-packages/pip/py.typed 
| 4 + .../site-packages/pkg_resources/__init__.py | 3720 +++++++ .../site-packages/pkg_resources/api_tests.txt | 424 + .../site-packages/pkg_resources/py.typed | 0 .../pkg_resources/tests/__init__.py | 0 .../data/my-test-package-source/setup.cfg | 0 .../data/my-test-package-source/setup.py | 7 + .../EGG-INFO/PKG-INFO | 10 + .../EGG-INFO/SOURCES.txt | 7 + .../EGG-INFO/dependency_links.txt | 1 + .../EGG-INFO/top_level.txt | 1 + .../EGG-INFO/zip-safe | 1 + .../my_test_package-1.0-py3.7.egg | Bin 0 -> 843 bytes .../tests/test_find_distributions.py | 56 + .../tests/test_integration_zope_interface.py | 54 + .../pkg_resources/tests/test_markers.py | 8 + .../pkg_resources/tests/test_pkg_resources.py | 427 + .../pkg_resources/tests/test_resources.py | 869 ++ .../pkg_resources/tests/test_working_set.py | 501 + .../pyotp-2.9.0.dist-info/INSTALLER | 1 + .../pyotp-2.9.0.dist-info/LICENSE | 21 + .../pyotp-2.9.0.dist-info/METADATA | 215 + .../pyotp-2.9.0.dist-info/RECORD | 24 + .../pyotp-2.9.0.dist-info/REQUESTED | 0 .../site-packages/pyotp-2.9.0.dist-info/WHEEL | 5 + .../pyotp-2.9.0.dist-info/top_level.txt | 1 + .../site-packages/pyotp/__init__.py | 110 + lib/python3.11/site-packages/pyotp/compat.py | 7 + .../site-packages/pyotp/contrib/__init__.py | 1 + .../site-packages/pyotp/contrib/steam.py | 49 + lib/python3.11/site-packages/pyotp/hotp.py | 83 + lib/python3.11/site-packages/pyotp/otp.py | 68 + lib/python3.11/site-packages/pyotp/py.typed | 0 lib/python3.11/site-packages/pyotp/totp.py | 119 + lib/python3.11/site-packages/pyotp/utils.py | 88 + .../requests-2.32.3.dist-info/INSTALLER | 1 + .../requests-2.32.3.dist-info/LICENSE | 175 + .../requests-2.32.3.dist-info/METADATA | 119 + .../requests-2.32.3.dist-info/RECORD | 43 + .../requests-2.32.3.dist-info/REQUESTED | 0 .../requests-2.32.3.dist-info/WHEEL | 5 + .../requests-2.32.3.dist-info/top_level.txt | 1 + .../site-packages/requests/__init__.py | 184 + .../site-packages/requests/__version__.py | 14 + 
.../site-packages/requests/_internal_utils.py | 50 + .../site-packages/requests/adapters.py | 719 ++ lib/python3.11/site-packages/requests/api.py | 157 + lib/python3.11/site-packages/requests/auth.py | 314 + .../site-packages/requests/certs.py | 17 + .../site-packages/requests/compat.py | 94 + .../site-packages/requests/cookies.py | 561 ++ .../site-packages/requests/exceptions.py | 151 + lib/python3.11/site-packages/requests/help.py | 134 + .../site-packages/requests/hooks.py | 33 + .../site-packages/requests/models.py | 1037 ++ .../site-packages/requests/packages.py | 23 + .../site-packages/requests/sessions.py | 831 ++ .../site-packages/requests/status_codes.py | 128 + .../site-packages/requests/structures.py | 99 + .../site-packages/requests/utils.py | 1096 +++ .../setuptools-75.6.0.dist-info/INSTALLER | 1 + .../setuptools-75.6.0.dist-info/LICENSE | 17 + .../setuptools-75.6.0.dist-info/METADATA | 142 + .../setuptools-75.6.0.dist-info/RECORD | 927 ++ .../setuptools-75.6.0.dist-info/WHEEL | 5 + .../entry_points.txt | 51 + .../setuptools-75.6.0.dist-info/top_level.txt | 3 + .../setuptools-75.6.0.virtualenv | 0 .../site-packages/setuptools/__init__.py | 288 + .../setuptools/_core_metadata.py | 286 + .../setuptools/_distutils/__init__.py | 14 + .../setuptools/_distutils/_log.py | 3 + .../setuptools/_distutils/_macos_compat.py | 12 + .../setuptools/_distutils/_modified.py | 73 + .../setuptools/_distutils/_msvccompiler.py | 604 ++ .../setuptools/_distutils/archive_util.py | 264 + .../setuptools/_distutils/ccompiler.py | 1256 +++ .../setuptools/_distutils/cmd.py | 439 + .../setuptools/_distutils/command/__init__.py | 23 + .../_distutils/command/_framework_compat.py | 54 + .../setuptools/_distutils/command/bdist.py | 155 + .../_distutils/command/bdist_dumb.py | 140 + .../_distutils/command/bdist_rpm.py | 597 ++ .../setuptools/_distutils/command/build.py | 156 + .../_distutils/command/build_clib.py | 208 + .../_distutils/command/build_ext.py | 796 ++ 
.../setuptools/_distutils/command/build_py.py | 406 + .../_distutils/command/build_scripts.py | 170 + .../setuptools/_distutils/command/check.py | 154 + .../setuptools/_distutils/command/clean.py | 76 + .../setuptools/_distutils/command/config.py | 369 + .../setuptools/_distutils/command/install.py | 811 ++ .../_distutils/command/install_data.py | 94 + .../_distutils/command/install_egg_info.py | 92 + .../_distutils/command/install_headers.py | 44 + .../_distutils/command/install_lib.py | 234 + .../_distutils/command/install_scripts.py | 61 + .../setuptools/_distutils/command/sdist.py | 515 + .../setuptools/_distutils/compat/__init__.py | 15 + .../setuptools/_distutils/compat/py38.py | 34 + .../setuptools/_distutils/compat/py39.py | 66 + .../setuptools/_distutils/core.py | 286 + .../setuptools/_distutils/cygwinccompiler.py | 339 + .../setuptools/_distutils/debug.py | 5 + .../setuptools/_distutils/dep_util.py | 14 + .../setuptools/_distutils/dir_util.py | 244 + .../setuptools/_distutils/dist.py | 1288 +++ .../setuptools/_distutils/errors.py | 124 + .../setuptools/_distutils/extension.py | 247 + .../setuptools/_distutils/fancy_getopt.py | 469 + .../setuptools/_distutils/file_util.py | 236 + .../setuptools/_distutils/filelist.py | 369 + .../setuptools/_distutils/log.py | 56 + .../setuptools/_distutils/spawn.py | 117 + .../setuptools/_distutils/sysconfig.py | 583 ++ .../setuptools/_distutils/tests/__init__.py | 42 + .../_distutils/tests/compat/__init__.py | 0 .../_distutils/tests/compat/py38.py | 50 + .../setuptools/_distutils/tests/support.py | 134 + .../_distutils/tests/test_archive_util.py | 353 + .../setuptools/_distutils/tests/test_bdist.py | 47 + .../_distutils/tests/test_bdist_dumb.py | 78 + .../_distutils/tests/test_bdist_rpm.py | 128 + .../setuptools/_distutils/tests/test_build.py | 47 + .../_distutils/tests/test_build_clib.py | 134 + .../_distutils/tests/test_build_ext.py | 563 ++ .../_distutils/tests/test_build_py.py | 196 + 
.../_distutils/tests/test_build_scripts.py | 96 + .../_distutils/tests/test_ccompiler.py | 91 + .../setuptools/_distutils/tests/test_check.py | 194 + .../setuptools/_distutils/tests/test_clean.py | 45 + .../setuptools/_distutils/tests/test_cmd.py | 107 + .../_distutils/tests/test_config_cmd.py | 87 + .../setuptools/_distutils/tests/test_core.py | 130 + .../_distutils/tests/test_cygwinccompiler.py | 81 + .../_distutils/tests/test_dir_util.py | 134 + .../setuptools/_distutils/tests/test_dist.py | 545 ++ .../_distutils/tests/test_extension.py | 108 + .../_distutils/tests/test_file_util.py | 94 + .../_distutils/tests/test_filelist.py | 336 + .../_distutils/tests/test_install.py | 245 + .../_distutils/tests/test_install_data.py | 74 + .../_distutils/tests/test_install_headers.py | 33 + .../_distutils/tests/test_install_lib.py | 110 + .../_distutils/tests/test_install_scripts.py | 52 + .../setuptools/_distutils/tests/test_log.py | 12 + .../_distutils/tests/test_mingwccompiler.py | 56 + .../_distutils/tests/test_modified.py | 126 + .../_distutils/tests/test_msvccompiler.py | 137 + .../setuptools/_distutils/tests/test_sdist.py | 470 + .../setuptools/_distutils/tests/test_spawn.py | 131 + .../_distutils/tests/test_sysconfig.py | 319 + .../_distutils/tests/test_text_file.py | 127 + .../_distutils/tests/test_unixccompiler.py | 351 + .../setuptools/_distutils/tests/test_util.py | 243 + .../_distutils/tests/test_version.py | 80 + .../_distutils/tests/test_versionpredicate.py | 0 .../_distutils/tests/unix_compat.py | 17 + .../setuptools/_distutils/text_file.py | 286 + .../setuptools/_distutils/unixccompiler.py | 402 + .../setuptools/_distutils/util.py | 505 + .../setuptools/_distutils/version.py | 349 + .../setuptools/_distutils/versionpredicate.py | 175 + .../setuptools/_distutils/zosccompiler.py | 229 + .../site-packages/setuptools/_entry_points.py | 90 + .../site-packages/setuptools/_imp.py | 87 + .../site-packages/setuptools/_importlib.py | 9 + 
.../site-packages/setuptools/_itertools.py | 23 + .../setuptools/_normalization.py | 144 + .../site-packages/setuptools/_path.py | 84 + .../site-packages/setuptools/_reqs.py | 42 + .../site-packages/setuptools/_shutil.py | 53 + .../autocommand-2.2.2.dist-info/INSTALLER | 1 + .../autocommand-2.2.2.dist-info/LICENSE | 166 + .../autocommand-2.2.2.dist-info/METADATA | 420 + .../autocommand-2.2.2.dist-info/RECORD | 18 + .../_vendor/autocommand-2.2.2.dist-info/WHEEL | 5 + .../autocommand-2.2.2.dist-info/top_level.txt | 1 + .../_vendor/autocommand/__init__.py | 27 + .../_vendor/autocommand/autoasync.py | 142 + .../_vendor/autocommand/autocommand.py | 70 + .../_vendor/autocommand/automain.py | 59 + .../_vendor/autocommand/autoparse.py | 333 + .../setuptools/_vendor/autocommand/errors.py | 23 + .../INSTALLER | 1 + .../backports.tarfile-1.2.0.dist-info/LICENSE | 17 + .../METADATA | 46 + .../backports.tarfile-1.2.0.dist-info/RECORD | 17 + .../REQUESTED | 0 .../backports.tarfile-1.2.0.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../setuptools/_vendor/backports/__init__.py | 1 + .../_vendor/backports/tarfile/__init__.py | 2937 ++++++ .../_vendor/backports/tarfile/__main__.py | 5 + .../backports/tarfile/compat/__init__.py | 0 .../_vendor/backports/tarfile/compat/py38.py | 24 + .../INSTALLER | 1 + .../LICENSE | 202 + .../METADATA | 129 + .../importlib_metadata-8.0.0.dist-info/RECORD | 32 + .../REQUESTED | 0 .../importlib_metadata-8.0.0.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../_vendor/importlib_metadata/__init__.py | 1083 ++ .../_vendor/importlib_metadata/_adapters.py | 83 + .../importlib_metadata/_collections.py | 30 + .../_vendor/importlib_metadata/_compat.py | 57 + .../_vendor/importlib_metadata/_functools.py | 104 + .../_vendor/importlib_metadata/_itertools.py | 73 + .../_vendor/importlib_metadata/_meta.py | 67 + .../_vendor/importlib_metadata/_text.py | 99 + .../importlib_metadata/compat/__init__.py | 0 .../importlib_metadata/compat/py311.py | 22 + 
.../_vendor/importlib_metadata/compat/py39.py | 36 + .../_vendor/importlib_metadata/diagnose.py | 21 + .../_vendor/importlib_metadata/py.typed | 0 .../_vendor/inflect-7.3.1.dist-info/INSTALLER | 1 + .../_vendor/inflect-7.3.1.dist-info/LICENSE | 17 + .../_vendor/inflect-7.3.1.dist-info/METADATA | 591 ++ .../_vendor/inflect-7.3.1.dist-info/RECORD | 13 + .../_vendor/inflect-7.3.1.dist-info/WHEEL | 5 + .../inflect-7.3.1.dist-info/top_level.txt | 1 + .../setuptools/_vendor/inflect/__init__.py | 3986 ++++++++ .../_vendor/inflect/compat/__init__.py | 0 .../setuptools/_vendor/inflect/compat/py38.py | 7 + .../setuptools/_vendor/inflect/py.typed | 0 .../INSTALLER | 1 + .../LICENSE | 17 + .../METADATA | 85 + .../jaraco.collections-5.1.0.dist-info/RECORD | 10 + .../REQUESTED | 0 .../jaraco.collections-5.1.0.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../jaraco.context-5.3.0.dist-info/INSTALLER | 1 + .../jaraco.context-5.3.0.dist-info/LICENSE | 17 + .../jaraco.context-5.3.0.dist-info/METADATA | 75 + .../jaraco.context-5.3.0.dist-info/RECORD | 8 + .../jaraco.context-5.3.0.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../INSTALLER | 1 + .../jaraco.functools-4.0.1.dist-info/LICENSE | 17 + .../jaraco.functools-4.0.1.dist-info/METADATA | 64 + .../jaraco.functools-4.0.1.dist-info/RECORD | 10 + .../jaraco.functools-4.0.1.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../jaraco.text-3.12.1.dist-info/INSTALLER | 1 + .../jaraco.text-3.12.1.dist-info/LICENSE | 17 + .../jaraco.text-3.12.1.dist-info/METADATA | 95 + .../jaraco.text-3.12.1.dist-info/RECORD | 20 + .../jaraco.text-3.12.1.dist-info/REQUESTED | 0 .../jaraco.text-3.12.1.dist-info/WHEEL | 5 + .../top_level.txt | 1 + .../_vendor/jaraco/collections/__init__.py | 1091 +++ .../_vendor/jaraco/collections/py.typed | 0 .../setuptools/_vendor/jaraco/context.py | 361 + .../_vendor/jaraco/functools/__init__.py | 633 ++ .../_vendor/jaraco/functools/__init__.pyi | 125 + .../_vendor/jaraco/functools/py.typed | 0 
.../_vendor/jaraco/text/Lorem ipsum.txt | 2 + .../_vendor/jaraco/text/__init__.py | 624 ++ .../setuptools/_vendor/jaraco/text/layouts.py | 25 + .../_vendor/jaraco/text/show-newlines.py | 33 + .../_vendor/jaraco/text/strip-prefix.py | 21 + .../_vendor/jaraco/text/to-dvorak.py | 6 + .../_vendor/jaraco/text/to-qwerty.py | 6 + .../more_itertools-10.3.0.dist-info/INSTALLER | 1 + .../more_itertools-10.3.0.dist-info/LICENSE | 19 + .../more_itertools-10.3.0.dist-info/METADATA | 266 + .../more_itertools-10.3.0.dist-info/RECORD | 16 + .../more_itertools-10.3.0.dist-info/REQUESTED | 0 .../more_itertools-10.3.0.dist-info/WHEEL | 4 + .../_vendor/more_itertools/__init__.py | 6 + .../_vendor/more_itertools/__init__.pyi | 2 + .../setuptools/_vendor/more_itertools/more.py | 4806 +++++++++ .../_vendor/more_itertools/more.pyi | 709 ++ .../_vendor/more_itertools/py.typed | 0 .../_vendor/more_itertools/recipes.py | 1046 ++ .../_vendor/more_itertools/recipes.pyi | 136 + .../packaging-24.2.dist-info/INSTALLER | 1 + .../_vendor/packaging-24.2.dist-info/LICENSE | 3 + .../packaging-24.2.dist-info/LICENSE.APACHE | 177 + .../packaging-24.2.dist-info/LICENSE.BSD | 23 + .../_vendor/packaging-24.2.dist-info/METADATA | 102 + .../_vendor/packaging-24.2.dist-info/RECORD | 25 + .../packaging-24.2.dist-info/REQUESTED | 0 .../_vendor/packaging-24.2.dist-info/WHEEL | 4 + .../setuptools/_vendor/packaging/__init__.py | 15 + .../setuptools/_vendor/packaging/_elffile.py | 110 + .../_vendor/packaging/_manylinux.py | 263 + .../_vendor/packaging/_musllinux.py | 85 + .../setuptools/_vendor/packaging/_parser.py | 354 + .../_vendor/packaging/_structures.py | 61 + .../_vendor/packaging/_tokenizer.py | 194 + .../_vendor/packaging/licenses/__init__.py | 145 + .../_vendor/packaging/licenses/_spdx.py | 759 ++ .../setuptools/_vendor/packaging/markers.py | 331 + .../setuptools/_vendor/packaging/metadata.py | 863 ++ .../setuptools/_vendor/packaging/py.typed | 0 .../_vendor/packaging/requirements.py | 91 + 
.../_vendor/packaging/specifiers.py | 1020 ++ .../setuptools/_vendor/packaging/tags.py | 617 ++ .../setuptools/_vendor/packaging/utils.py | 163 + .../setuptools/_vendor/packaging/version.py | 582 ++ .../platformdirs-4.2.2.dist-info/INSTALLER | 1 + .../platformdirs-4.2.2.dist-info/METADATA | 319 + .../platformdirs-4.2.2.dist-info/RECORD | 23 + .../platformdirs-4.2.2.dist-info/REQUESTED | 0 .../platformdirs-4.2.2.dist-info/WHEEL | 4 + .../licenses/LICENSE | 21 + .../_vendor/platformdirs/__init__.py | 627 ++ .../_vendor/platformdirs/__main__.py | 55 + .../_vendor/platformdirs/android.py | 249 + .../setuptools/_vendor/platformdirs/api.py | 292 + .../setuptools/_vendor/platformdirs/macos.py | 130 + .../setuptools/_vendor/platformdirs/py.typed | 0 .../setuptools/_vendor/platformdirs/unix.py | 275 + .../_vendor/platformdirs/version.py | 16 + .../_vendor/platformdirs/windows.py | 272 + .../setuptools/_vendor/ruff.toml | 1 + .../_vendor/tomli-2.0.1.dist-info/INSTALLER | 1 + .../_vendor/tomli-2.0.1.dist-info/LICENSE | 21 + .../_vendor/tomli-2.0.1.dist-info/METADATA | 206 + .../_vendor/tomli-2.0.1.dist-info/RECORD | 15 + .../_vendor/tomli-2.0.1.dist-info/REQUESTED | 0 .../_vendor/tomli-2.0.1.dist-info/WHEEL | 4 + .../setuptools/_vendor/tomli/__init__.py | 11 + .../setuptools/_vendor/tomli/_parser.py | 691 ++ .../setuptools/_vendor/tomli/_re.py | 107 + .../setuptools/_vendor/tomli/_types.py | 10 + .../setuptools/_vendor/tomli/py.typed | 1 + .../typeguard-4.3.0.dist-info/INSTALLER | 1 + .../_vendor/typeguard-4.3.0.dist-info/LICENSE | 19 + .../typeguard-4.3.0.dist-info/METADATA | 81 + .../_vendor/typeguard-4.3.0.dist-info/RECORD | 34 + .../_vendor/typeguard-4.3.0.dist-info/WHEEL | 5 + .../entry_points.txt | 2 + .../typeguard-4.3.0.dist-info/top_level.txt | 1 + .../setuptools/_vendor/typeguard/__init__.py | 48 + .../setuptools/_vendor/typeguard/_checkers.py | 993 ++ .../setuptools/_vendor/typeguard/_config.py | 108 + .../_vendor/typeguard/_decorators.py | 235 + 
.../_vendor/typeguard/_exceptions.py | 42 + .../_vendor/typeguard/_functions.py | 308 + .../_vendor/typeguard/_importhook.py | 213 + .../setuptools/_vendor/typeguard/_memo.py | 48 + .../_vendor/typeguard/_pytest_plugin.py | 127 + .../_vendor/typeguard/_suppression.py | 86 + .../_vendor/typeguard/_transformer.py | 1229 +++ .../_vendor/typeguard/_union_transformer.py | 55 + .../setuptools/_vendor/typeguard/_utils.py | 173 + .../setuptools/_vendor/typeguard/py.typed | 0 .../INSTALLER | 1 + .../LICENSE | 279 + .../METADATA | 67 + .../typing_extensions-4.12.2.dist-info/RECORD | 7 + .../typing_extensions-4.12.2.dist-info/WHEEL | 4 + .../setuptools/_vendor/typing_extensions.py | 3641 +++++++ .../_vendor/wheel-0.43.0.dist-info/INSTALLER | 1 + .../wheel-0.43.0.dist-info/LICENSE.txt | 21 + .../_vendor/wheel-0.43.0.dist-info/METADATA | 61 + .../_vendor/wheel-0.43.0.dist-info/RECORD | 63 + .../_vendor/wheel-0.43.0.dist-info/REQUESTED | 0 .../_vendor/wheel-0.43.0.dist-info/WHEEL | 4 + .../wheel-0.43.0.dist-info/entry_points.txt | 6 + .../setuptools/_vendor/wheel/__init__.py | 3 + .../setuptools/_vendor/wheel/__main__.py | 23 + .../_vendor/wheel/_setuptools_logging.py | 26 + .../setuptools/_vendor/wheel/bdist_wheel.py | 595 ++ .../setuptools/_vendor/wheel/cli/__init__.py | 155 + .../setuptools/_vendor/wheel/cli/convert.py | 273 + .../setuptools/_vendor/wheel/cli/pack.py | 85 + .../setuptools/_vendor/wheel/cli/tags.py | 139 + .../setuptools/_vendor/wheel/cli/unpack.py | 30 + .../_vendor/wheel/macosx_libfile.py | 469 + .../setuptools/_vendor/wheel/metadata.py | 180 + .../setuptools/_vendor/wheel/util.py | 26 + .../_vendor/wheel/vendored/__init__.py | 0 .../wheel/vendored/packaging/__init__.py | 0 .../wheel/vendored/packaging/_elffile.py | 108 + .../wheel/vendored/packaging/_manylinux.py | 260 + .../wheel/vendored/packaging/_musllinux.py | 83 + .../wheel/vendored/packaging/_parser.py | 356 + .../wheel/vendored/packaging/_structures.py | 61 + 
.../wheel/vendored/packaging/_tokenizer.py | 192 + .../wheel/vendored/packaging/markers.py | 253 + .../wheel/vendored/packaging/requirements.py | 90 + .../wheel/vendored/packaging/specifiers.py | 1011 ++ .../_vendor/wheel/vendored/packaging/tags.py | 571 ++ .../_vendor/wheel/vendored/packaging/utils.py | 172 + .../wheel/vendored/packaging/version.py | 561 ++ .../_vendor/wheel/vendored/vendor.txt | 1 + .../setuptools/_vendor/wheel/wheelfile.py | 196 + .../_vendor/zipp-3.19.2.dist-info/INSTALLER | 1 + .../_vendor/zipp-3.19.2.dist-info/LICENSE | 17 + .../_vendor/zipp-3.19.2.dist-info/METADATA | 102 + .../_vendor/zipp-3.19.2.dist-info/RECORD | 15 + .../_vendor/zipp-3.19.2.dist-info/REQUESTED | 0 .../_vendor/zipp-3.19.2.dist-info/WHEEL | 5 + .../zipp-3.19.2.dist-info/top_level.txt | 1 + .../setuptools/_vendor/zipp/__init__.py | 501 + .../_vendor/zipp/compat/__init__.py | 0 .../setuptools/_vendor/zipp/compat/py310.py | 11 + .../setuptools/_vendor/zipp/glob.py | 106 + .../site-packages/setuptools/archive_util.py | 219 + .../site-packages/setuptools/build_meta.py | 560 ++ .../setuptools/command/__init__.py | 21 + .../setuptools/command/_requirestxt.py | 131 + .../site-packages/setuptools/command/alias.py | 77 + .../setuptools/command/bdist_egg.py | 479 + .../setuptools/command/bdist_rpm.py | 42 + .../setuptools/command/bdist_wheel.py | 612 ++ .../site-packages/setuptools/command/build.py | 135 + .../setuptools/command/build_clib.py | 103 + .../setuptools/command/build_ext.py | 469 + .../setuptools/command/build_py.py | 400 + .../setuptools/command/develop.py | 195 + .../setuptools/command/dist_info.py | 103 + .../setuptools/command/easy_install.py | 2365 +++++ .../setuptools/command/editable_wheel.py | 925 ++ .../setuptools/command/egg_info.py | 721 ++ .../setuptools/command/install.py | 183 + .../setuptools/command/install_egg_info.py | 58 + .../setuptools/command/install_lib.py | 137 + .../setuptools/command/install_scripts.py | 73 + .../setuptools/command/launcher 
manifest.xml | 15 + .../setuptools/command/rotate.py | 65 + .../setuptools/command/saveopts.py | 21 + .../site-packages/setuptools/command/sdist.py | 217 + .../setuptools/command/setopt.py | 141 + .../site-packages/setuptools/command/test.py | 45 + .../setuptools/compat/__init__.py | 0 .../site-packages/setuptools/compat/py310.py | 9 + .../site-packages/setuptools/compat/py311.py | 27 + .../site-packages/setuptools/compat/py312.py | 13 + .../site-packages/setuptools/compat/py39.py | 9 + .../site-packages/setuptools/config/NOTICE | 10 + .../setuptools/config/__init__.py | 43 + .../setuptools/config/_apply_pyprojecttoml.py | 462 + .../config/_validate_pyproject/NOTICE | 438 + .../config/_validate_pyproject/__init__.py | 34 + .../_validate_pyproject/error_reporting.py | 336 + .../_validate_pyproject/extra_validations.py | 52 + .../fastjsonschema_exceptions.py | 51 + .../fastjsonschema_validations.py | 1319 +++ .../config/_validate_pyproject/formats.py | 375 + .../setuptools/config/distutils.schema.json | 26 + .../site-packages/setuptools/config/expand.py | 449 + .../setuptools/config/pyprojecttoml.py | 468 + .../setuptools/config/setupcfg.py | 772 ++ .../setuptools/config/setuptools.schema.json | 433 + .../site-packages/setuptools/depends.py | 185 + .../site-packages/setuptools/discovery.py | 614 ++ .../site-packages/setuptools/dist.py | 1000 ++ .../site-packages/setuptools/errors.py | 67 + .../site-packages/setuptools/extension.py | 177 + .../site-packages/setuptools/glob.py | 185 + .../site-packages/setuptools/installer.py | 150 + .../site-packages/setuptools/launch.py | 36 + .../site-packages/setuptools/logging.py | 40 + .../site-packages/setuptools/modified.py | 18 + .../site-packages/setuptools/monkey.py | 126 + .../site-packages/setuptools/msvc.py | 1526 +++ .../site-packages/setuptools/namespaces.py | 106 + .../site-packages/setuptools/package_index.py | 1147 +++ .../site-packages/setuptools/sandbox.py | 536 + .../setuptools/script (dev).tmpl | 6 + 
.../site-packages/setuptools/script.tmpl | 3 + .../setuptools/tests/__init__.py | 13 + .../setuptools/tests/compat/__init__.py | 0 .../setuptools/tests/compat/py39.py | 3 + .../setuptools/tests/config/__init__.py | 0 .../tests/config/downloads/__init__.py | 59 + .../tests/config/downloads/preload.py | 18 + .../tests/config/setupcfg_examples.txt | 22 + .../tests/config/test_apply_pyprojecttoml.py | 512 + .../setuptools/tests/config/test_expand.py | 221 + .../tests/config/test_pyprojecttoml.py | 396 + .../config/test_pyprojecttoml_dynamic_deps.py | 109 + .../setuptools/tests/config/test_setupcfg.py | 965 ++ .../setuptools/tests/contexts.py | 145 + .../setuptools/tests/environment.py | 95 + .../setuptools/tests/fixtures.py | 157 + .../indexes/test_links_priority/external.html | 3 + .../simple/foobar/index.html | 4 + .../setuptools/tests/integration/__init__.py | 0 .../setuptools/tests/integration/helpers.py | 77 + .../integration/test_pip_install_sdist.py | 223 + .../setuptools/tests/mod_with_constant.py | 1 + .../setuptools/tests/namespaces.py | 90 + .../setuptools/tests/script-with-bom.py | 1 + .../site-packages/setuptools/tests/server.py | 86 + .../setuptools/tests/test_archive_util.py | 36 + .../tests/test_bdist_deprecations.py | 28 + .../setuptools/tests/test_bdist_egg.py | 73 + .../setuptools/tests/test_bdist_wheel.py | 623 ++ .../setuptools/tests/test_build.py | 33 + .../setuptools/tests/test_build_clib.py | 84 + .../setuptools/tests/test_build_ext.py | 291 + .../setuptools/tests/test_build_meta.py | 970 ++ .../setuptools/tests/test_build_py.py | 480 + .../setuptools/tests/test_config_discovery.py | 647 ++ .../setuptools/tests/test_core_metadata.py | 484 + .../setuptools/tests/test_depends.py | 15 + .../setuptools/tests/test_develop.py | 175 + .../setuptools/tests/test_dist.py | 278 + .../setuptools/tests/test_dist_info.py | 210 + .../tests/test_distutils_adoption.py | 198 + .../setuptools/tests/test_easy_install.py | 1474 +++ 
.../setuptools/tests/test_editable_install.py | 1289 +++ .../setuptools/tests/test_egg_info.py | 1285 +++ .../setuptools/tests/test_extern.py | 15 + .../setuptools/tests/test_find_packages.py | 218 + .../setuptools/tests/test_find_py_modules.py | 73 + .../setuptools/tests/test_glob.py | 45 + .../setuptools/tests/test_install_scripts.py | 89 + .../setuptools/tests/test_logging.py | 76 + .../setuptools/tests/test_manifest.py | 625 ++ .../setuptools/tests/test_namespaces.py | 138 + .../setuptools/tests/test_packageindex.py | 267 + .../setuptools/tests/test_sandbox.py | 134 + .../setuptools/tests/test_sdist.py | 975 ++ .../setuptools/tests/test_setopt.py | 40 + .../setuptools/tests/test_setuptools.py | 290 + .../setuptools/tests/test_shutil_wrapper.py | 23 + .../setuptools/tests/test_unicode_utils.py | 10 + .../setuptools/tests/test_virtualenv.py | 113 + .../setuptools/tests/test_warnings.py | 106 + .../setuptools/tests/test_wheel.py | 716 ++ .../setuptools/tests/test_windows_wrappers.py | 259 + .../site-packages/setuptools/tests/text.py | 4 + .../setuptools/tests/textwrap.py | 6 + .../site-packages/setuptools/unicode_utils.py | 102 + .../site-packages/setuptools/version.py | 6 + .../site-packages/setuptools/warnings.py | 110 + .../site-packages/setuptools/wheel.py | 236 + .../setuptools/windows_support.py | 30 + .../sniffio-1.3.1.dist-info/INSTALLER | 1 + .../sniffio-1.3.1.dist-info/LICENSE | 3 + .../sniffio-1.3.1.dist-info/LICENSE.APACHE2 | 202 + .../sniffio-1.3.1.dist-info/LICENSE.MIT | 20 + .../sniffio-1.3.1.dist-info/METADATA | 104 + .../sniffio-1.3.1.dist-info/RECORD | 19 + .../sniffio-1.3.1.dist-info/WHEEL | 5 + .../sniffio-1.3.1.dist-info/top_level.txt | 1 + .../site-packages/sniffio/__init__.py | 17 + lib/python3.11/site-packages/sniffio/_impl.py | 95 + .../site-packages/sniffio/_tests/__init__.py | 0 .../sniffio/_tests/test_sniffio.py | 84 + .../site-packages/sniffio/_version.py | 3 + lib/python3.11/site-packages/sniffio/py.typed | 0 
.../socksio-1.0.0.dist-info/INSTALLER | 1 + .../socksio-1.0.0.dist-info/LICENSE | 21 + .../socksio-1.0.0.dist-info/METADATA | 182 + .../socksio-1.0.0.dist-info/RECORD | 20 + .../socksio-1.0.0.dist-info/WHEEL | 4 + .../site-packages/socksio/__init__.py | 46 + .../site-packages/socksio/_types.py | 3 + .../site-packages/socksio/compat.py | 86 + .../site-packages/socksio/exceptions.py | 6 + lib/python3.11/site-packages/socksio/py.typed | 0 .../site-packages/socksio/socks4.py | 253 + .../site-packages/socksio/socks5.py | 399 + lib/python3.11/site-packages/socksio/utils.py | 95 + .../soupsieve-2.6.dist-info/INSTALLER | 1 + .../soupsieve-2.6.dist-info/METADATA | 114 + .../soupsieve-2.6.dist-info/RECORD | 20 + .../soupsieve-2.6.dist-info/WHEEL | 4 + .../licenses/LICENSE.md | 21 + .../site-packages/soupsieve/__init__.py | 168 + .../site-packages/soupsieve/__meta__.py | 197 + .../site-packages/soupsieve/css_match.py | 1582 +++ .../site-packages/soupsieve/css_parser.py | 1289 +++ .../site-packages/soupsieve/css_types.py | 407 + .../site-packages/soupsieve/pretty.py | 139 + .../site-packages/soupsieve/py.typed | 0 .../site-packages/soupsieve/util.py | 117 + lib/python3.11/site-packages/twikit.egg-link | 2 + .../INSTALLER | 1 + .../LICENSE | 279 + .../METADATA | 67 + .../typing_extensions-4.12.2.dist-info/RECORD | 7 + .../typing_extensions-4.12.2.dist-info/WHEEL | 4 + .../site-packages/typing_extensions.py | 3641 +++++++ .../urllib3-2.3.0.dist-info/INSTALLER | 1 + .../urllib3-2.3.0.dist-info/METADATA | 154 + .../urllib3-2.3.0.dist-info/RECORD | 79 + .../urllib3-2.3.0.dist-info/WHEEL | 4 + .../licenses/LICENSE.txt | 21 + .../site-packages/urllib3/__init__.py | 211 + .../site-packages/urllib3/_base_connection.py | 165 + .../site-packages/urllib3/_collections.py | 479 + .../site-packages/urllib3/_request_methods.py | 278 + .../site-packages/urllib3/_version.py | 16 + .../site-packages/urllib3/connection.py | 1044 ++ .../site-packages/urllib3/connectionpool.py | 1178 +++ 
.../site-packages/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/emscripten/__init__.py | 16 + .../urllib3/contrib/emscripten/connection.py | 255 + .../emscripten/emscripten_fetch_worker.js | 110 + .../urllib3/contrib/emscripten/fetch.py | 708 ++ .../urllib3/contrib/emscripten/request.py | 22 + .../urllib3/contrib/emscripten/response.py | 285 + .../urllib3/contrib/pyopenssl.py | 554 ++ .../site-packages/urllib3/contrib/socks.py | 228 + .../site-packages/urllib3/exceptions.py | 327 + .../site-packages/urllib3/fields.py | 341 + .../site-packages/urllib3/filepost.py | 89 + .../site-packages/urllib3/http2/__init__.py | 53 + .../site-packages/urllib3/http2/connection.py | 356 + .../site-packages/urllib3/http2/probe.py | 87 + .../site-packages/urllib3/poolmanager.py | 637 ++ lib/python3.11/site-packages/urllib3/py.typed | 2 + .../site-packages/urllib3/response.py | 1278 +++ .../site-packages/urllib3/util/__init__.py | 42 + .../site-packages/urllib3/util/connection.py | 137 + .../site-packages/urllib3/util/proxy.py | 43 + .../site-packages/urllib3/util/request.py | 258 + .../site-packages/urllib3/util/response.py | 101 + .../site-packages/urllib3/util/retry.py | 533 + .../site-packages/urllib3/util/ssl_.py | 504 + .../urllib3/util/ssl_match_hostname.py | 159 + .../urllib3/util/ssltransport.py | 271 + .../site-packages/urllib3/util/timeout.py | 275 + .../site-packages/urllib3/util/url.py | 469 + .../site-packages/urllib3/util/util.py | 42 + .../site-packages/urllib3/util/wait.py | 124 + .../wheel-0.45.1.dist-info/INSTALLER | 1 + .../wheel-0.45.1.dist-info/LICENSE.txt | 21 + .../wheel-0.45.1.dist-info/METADATA | 66 + .../wheel-0.45.1.dist-info/RECORD | 76 + .../wheel-0.45.1.dist-info/WHEEL | 4 + .../wheel-0.45.1.dist-info/entry_points.txt | 6 + .../site-packages/wheel-0.45.1.virtualenv | 0 .../site-packages/wheel/__init__.py | 3 + .../site-packages/wheel/__main__.py | 23 + .../site-packages/wheel/_bdist_wheel.py | 613 ++ .../wheel/_setuptools_logging.py | 26 + 
.../site-packages/wheel/bdist_wheel.py | 26 + .../site-packages/wheel/cli/__init__.py | 155 + .../site-packages/wheel/cli/convert.py | 332 + .../site-packages/wheel/cli/pack.py | 85 + .../site-packages/wheel/cli/tags.py | 139 + .../site-packages/wheel/cli/unpack.py | 30 + .../site-packages/wheel/macosx_libfile.py | 482 + .../site-packages/wheel/metadata.py | 183 + lib/python3.11/site-packages/wheel/util.py | 17 + .../site-packages/wheel/vendored/__init__.py | 0 .../wheel/vendored/packaging/LICENSE | 3 + .../wheel/vendored/packaging/LICENSE.APACHE | 177 + .../wheel/vendored/packaging/LICENSE.BSD | 23 + .../wheel/vendored/packaging/__init__.py | 0 .../wheel/vendored/packaging/_elffile.py | 108 + .../wheel/vendored/packaging/_manylinux.py | 260 + .../wheel/vendored/packaging/_musllinux.py | 83 + .../wheel/vendored/packaging/_parser.py | 356 + .../wheel/vendored/packaging/_structures.py | 61 + .../wheel/vendored/packaging/_tokenizer.py | 192 + .../wheel/vendored/packaging/markers.py | 253 + .../wheel/vendored/packaging/requirements.py | 90 + .../wheel/vendored/packaging/specifiers.py | 1011 ++ .../wheel/vendored/packaging/tags.py | 571 ++ .../wheel/vendored/packaging/utils.py | 172 + .../wheel/vendored/packaging/version.py | 561 ++ .../site-packages/wheel/vendored/vendor.txt | 1 + .../site-packages/wheel/wheelfile.py | 227 + pyvenv.cfg | 8 + setup.py | 5 +- twikit.egg-info/PKG-INFO | 163 + twikit.egg-info/SOURCES.txt | 40 + twikit.egg-info/dependency_links.txt | 1 + twikit.egg-info/requires.txt | 5 + twikit.egg-info/top_level.txt | 1 + twikit/client/__init__.py | 4 + twikit/guest/tweet.py | 10 + twikit/streaming.py | 7 +- twikit/tweet.py | 10 + 1608 files changed, 427483 insertions(+), 308 deletions(-) create mode 100644 bin/activate create mode 100644 bin/activate.csh create mode 100644 bin/activate.fish create mode 100644 bin/activate.nu create mode 100644 bin/activate.ps1 create mode 100644 bin/activate_this.py create mode 100755 bin/filetype create mode 100755 
bin/httpx create mode 100755 bin/normalizer create mode 100755 bin/pip create mode 100755 bin/pip-3.11 create mode 100755 bin/pip3 create mode 100755 bin/pip3.11 create mode 120000 bin/python create mode 120000 bin/python3 create mode 120000 bin/python3.11 create mode 100755 bin/wheel create mode 100755 bin/wheel-3.11 create mode 100755 bin/wheel3 create mode 100755 bin/wheel3.11 create mode 100644 build/lib/twikit/__init__.py create mode 100644 build/lib/twikit/_captcha/__init__.py create mode 100644 build/lib/twikit/_captcha/base.py create mode 100644 build/lib/twikit/_captcha/capsolver.py create mode 100644 build/lib/twikit/bookmark.py create mode 100644 build/lib/twikit/client/__init__.py create mode 100644 build/lib/twikit/client/client.py create mode 100644 build/lib/twikit/client/gql.py create mode 100644 build/lib/twikit/client/v11.py create mode 100644 build/lib/twikit/community.py create mode 100644 build/lib/twikit/constants.py create mode 100644 build/lib/twikit/errors.py create mode 100644 build/lib/twikit/geo.py create mode 100644 build/lib/twikit/group.py create mode 100644 build/lib/twikit/guest/__init__.py create mode 100644 build/lib/twikit/guest/client.py create mode 100644 build/lib/twikit/guest/tweet.py create mode 100644 build/lib/twikit/guest/user.py create mode 100644 build/lib/twikit/list.py create mode 100644 build/lib/twikit/message.py create mode 100644 build/lib/twikit/notification.py create mode 100644 build/lib/twikit/streaming.py create mode 100644 build/lib/twikit/trend.py create mode 100644 build/lib/twikit/tweet.py create mode 100644 build/lib/twikit/user.py create mode 100644 build/lib/twikit/utils.py create mode 100644 build/lib/twikit/x_client_transaction/__init__.py create mode 100644 build/lib/twikit/x_client_transaction/cubic_curve.py create mode 100644 build/lib/twikit/x_client_transaction/interpolate.py create mode 100644 build/lib/twikit/x_client_transaction/rotation.py create mode 100644 
build/lib/twikit/x_client_transaction/transaction.py create mode 100644 build/lib/twikit/x_client_transaction/utils.py create mode 100644 example_project/test_twikit.py delete mode 100644 examples/delete_all_tweets.py delete mode 100644 examples/dm_auto_reply.py delete mode 100644 examples/download_tweet_media.py delete mode 100644 examples/example.py delete mode 100644 examples/guest.py delete mode 100644 examples/listen_for_new_tweets.py create mode 100644 lib/python3.11/site-packages/_distutils_hack/__init__.py create mode 100644 lib/python3.11/site-packages/_distutils_hack/override.py create mode 100644 lib/python3.11/site-packages/_virtualenv.pth create mode 100644 lib/python3.11/site-packages/_virtualenv.py create mode 100644 lib/python3.11/site-packages/anyio-4.8.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/anyio-4.8.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/anyio-4.8.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/anyio-4.8.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/anyio-4.8.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/anyio-4.8.0.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/anyio-4.8.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/anyio/__init__.py create mode 100644 lib/python3.11/site-packages/anyio/_backends/__init__.py create mode 100644 lib/python3.11/site-packages/anyio/_backends/_asyncio.py create mode 100644 lib/python3.11/site-packages/anyio/_backends/_trio.py create mode 100644 lib/python3.11/site-packages/anyio/_core/__init__.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_asyncio_selector_thread.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_eventloop.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_exceptions.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_fileio.py create mode 100644 
lib/python3.11/site-packages/anyio/_core/_resources.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_signals.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_sockets.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_streams.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_subprocesses.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_synchronization.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_tasks.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_testing.py create mode 100644 lib/python3.11/site-packages/anyio/_core/_typedattr.py create mode 100644 lib/python3.11/site-packages/anyio/abc/__init__.py create mode 100644 lib/python3.11/site-packages/anyio/abc/_eventloop.py create mode 100644 lib/python3.11/site-packages/anyio/abc/_resources.py create mode 100644 lib/python3.11/site-packages/anyio/abc/_sockets.py create mode 100644 lib/python3.11/site-packages/anyio/abc/_streams.py create mode 100644 lib/python3.11/site-packages/anyio/abc/_subprocesses.py create mode 100644 lib/python3.11/site-packages/anyio/abc/_tasks.py create mode 100644 lib/python3.11/site-packages/anyio/abc/_testing.py create mode 100644 lib/python3.11/site-packages/anyio/from_thread.py create mode 100644 lib/python3.11/site-packages/anyio/lowlevel.py create mode 100644 lib/python3.11/site-packages/anyio/py.typed create mode 100644 lib/python3.11/site-packages/anyio/pytest_plugin.py create mode 100644 lib/python3.11/site-packages/anyio/streams/__init__.py create mode 100644 lib/python3.11/site-packages/anyio/streams/buffered.py create mode 100644 lib/python3.11/site-packages/anyio/streams/file.py create mode 100644 lib/python3.11/site-packages/anyio/streams/memory.py create mode 100644 lib/python3.11/site-packages/anyio/streams/stapled.py create mode 100644 lib/python3.11/site-packages/anyio/streams/text.py create mode 100644 lib/python3.11/site-packages/anyio/streams/tls.py create mode 
100644 lib/python3.11/site-packages/anyio/to_interpreter.py create mode 100644 lib/python3.11/site-packages/anyio/to_process.py create mode 100644 lib/python3.11/site-packages/anyio/to_thread.py create mode 100644 lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/AUTHORS create mode 100644 lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/LICENSE create mode 100644 lib/python3.11/site-packages/bs4/__init__.py create mode 100644 lib/python3.11/site-packages/bs4/builder/__init__.py create mode 100644 lib/python3.11/site-packages/bs4/builder/_html5lib.py create mode 100644 lib/python3.11/site-packages/bs4/builder/_htmlparser.py create mode 100644 lib/python3.11/site-packages/bs4/builder/_lxml.py create mode 100644 lib/python3.11/site-packages/bs4/css.py create mode 100644 lib/python3.11/site-packages/bs4/dammit.py create mode 100644 lib/python3.11/site-packages/bs4/diagnose.py create mode 100644 lib/python3.11/site-packages/bs4/element.py create mode 100644 lib/python3.11/site-packages/bs4/formatter.py create mode 100644 lib/python3.11/site-packages/bs4/tests/__init__.py create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4670634698080256.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4818336571064320.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4999465949331456.testcase create mode 100644 
lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5000587759190016.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5167584867909632.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5270998950477824.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5375146639360000.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5492400320282624.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5843991618256896.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5984173902397440.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6124268085182464.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6241471367348224.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6306874195312640.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6600557255327744.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/crash-0d306a50c8ed8bcd0785b67000fcd5dea1d33f08.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/fuzz/crash-ffbdfa8a2b26f13537b68d3794b0478a4090ee4a.testcase create mode 100644 lib/python3.11/site-packages/bs4/tests/test_builder.py 
create mode 100644 lib/python3.11/site-packages/bs4/tests/test_builder_registry.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_css.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_dammit.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_docs.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_element.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_formatter.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_fuzz.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_html5lib.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_htmlparser.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_lxml.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_navigablestring.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_pageelement.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_soup.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_tag.py create mode 100644 lib/python3.11/site-packages/bs4/tests/test_tree.py create mode 100644 lib/python3.11/site-packages/certifi-2024.12.14.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/certifi-2024.12.14.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/certifi-2024.12.14.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/certifi-2024.12.14.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/certifi-2024.12.14.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/certifi-2024.12.14.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/certifi/__init__.py create mode 100644 lib/python3.11/site-packages/certifi/__main__.py create mode 100644 lib/python3.11/site-packages/certifi/cacert.pem create mode 100644 lib/python3.11/site-packages/certifi/core.py create mode 100644 lib/python3.11/site-packages/certifi/py.typed create mode 100644 
lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/charset_normalizer/__init__.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/__main__.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/api.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/cd.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/cli/__init__.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/cli/__main__.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/constant.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/legacy.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/md.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/models.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/py.typed create mode 100644 lib/python3.11/site-packages/charset_normalizer/utils.py create mode 100644 lib/python3.11/site-packages/charset_normalizer/version.py create mode 100644 lib/python3.11/site-packages/distutils-precedence.pth create mode 100644 lib/python3.11/site-packages/easy-install.pth create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/LICENSE create mode 100644 
lib/python3.11/site-packages/filetype-1.2.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/filetype-1.2.0.dist-info/zip-safe create mode 100644 lib/python3.11/site-packages/filetype/__init__.py create mode 100644 lib/python3.11/site-packages/filetype/__main__.py create mode 100644 lib/python3.11/site-packages/filetype/filetype.py create mode 100644 lib/python3.11/site-packages/filetype/helpers.py create mode 100644 lib/python3.11/site-packages/filetype/match.py create mode 100644 lib/python3.11/site-packages/filetype/types/__init__.py create mode 100644 lib/python3.11/site-packages/filetype/types/application.py create mode 100644 lib/python3.11/site-packages/filetype/types/archive.py create mode 100644 lib/python3.11/site-packages/filetype/types/audio.py create mode 100644 lib/python3.11/site-packages/filetype/types/base.py create mode 100644 lib/python3.11/site-packages/filetype/types/document.py create mode 100644 lib/python3.11/site-packages/filetype/types/font.py create mode 100644 lib/python3.11/site-packages/filetype/types/image.py create mode 100644 lib/python3.11/site-packages/filetype/types/isobmff.py create mode 100644 lib/python3.11/site-packages/filetype/types/video.py create mode 100644 lib/python3.11/site-packages/filetype/utils.py create mode 100644 lib/python3.11/site-packages/h11-0.14.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/h11-0.14.0.dist-info/LICENSE.txt create mode 100644 lib/python3.11/site-packages/h11-0.14.0.dist-info/METADATA create mode 100644 
lib/python3.11/site-packages/h11-0.14.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/h11-0.14.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/h11-0.14.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/h11/__init__.py create mode 100644 lib/python3.11/site-packages/h11/_abnf.py create mode 100644 lib/python3.11/site-packages/h11/_connection.py create mode 100644 lib/python3.11/site-packages/h11/_events.py create mode 100644 lib/python3.11/site-packages/h11/_headers.py create mode 100644 lib/python3.11/site-packages/h11/_readers.py create mode 100644 lib/python3.11/site-packages/h11/_receivebuffer.py create mode 100644 lib/python3.11/site-packages/h11/_state.py create mode 100644 lib/python3.11/site-packages/h11/_util.py create mode 100644 lib/python3.11/site-packages/h11/_version.py create mode 100644 lib/python3.11/site-packages/h11/_writers.py create mode 100644 lib/python3.11/site-packages/h11/py.typed create mode 100644 lib/python3.11/site-packages/h11/tests/__init__.py create mode 100644 lib/python3.11/site-packages/h11/tests/data/test-file create mode 100644 lib/python3.11/site-packages/h11/tests/helpers.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_against_stdlib_http.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_connection.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_events.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_headers.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_helpers.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_io.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_receivebuffer.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_state.py create mode 100644 lib/python3.11/site-packages/h11/tests/test_util.py create mode 100644 lib/python3.11/site-packages/httpcore-1.0.7.dist-info/INSTALLER create mode 100644 
lib/python3.11/site-packages/httpcore-1.0.7.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/httpcore-1.0.7.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/httpcore-1.0.7.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/httpcore-1.0.7.dist-info/licenses/LICENSE.md create mode 100644 lib/python3.11/site-packages/httpcore/__init__.py create mode 100644 lib/python3.11/site-packages/httpcore/_api.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/__init__.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/connection.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/connection_pool.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/http11.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/http2.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/http_proxy.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/interfaces.py create mode 100644 lib/python3.11/site-packages/httpcore/_async/socks_proxy.py create mode 100644 lib/python3.11/site-packages/httpcore/_backends/__init__.py create mode 100644 lib/python3.11/site-packages/httpcore/_backends/anyio.py create mode 100644 lib/python3.11/site-packages/httpcore/_backends/auto.py create mode 100644 lib/python3.11/site-packages/httpcore/_backends/base.py create mode 100644 lib/python3.11/site-packages/httpcore/_backends/mock.py create mode 100644 lib/python3.11/site-packages/httpcore/_backends/sync.py create mode 100644 lib/python3.11/site-packages/httpcore/_backends/trio.py create mode 100644 lib/python3.11/site-packages/httpcore/_exceptions.py create mode 100644 lib/python3.11/site-packages/httpcore/_models.py create mode 100644 lib/python3.11/site-packages/httpcore/_ssl.py create mode 100644 lib/python3.11/site-packages/httpcore/_sync/__init__.py create mode 100644 lib/python3.11/site-packages/httpcore/_sync/connection.py create mode 100644 
lib/python3.11/site-packages/httpcore/_sync/connection_pool.py create mode 100644 lib/python3.11/site-packages/httpcore/_sync/http11.py create mode 100644 lib/python3.11/site-packages/httpcore/_sync/http2.py create mode 100644 lib/python3.11/site-packages/httpcore/_sync/http_proxy.py create mode 100644 lib/python3.11/site-packages/httpcore/_sync/interfaces.py create mode 100644 lib/python3.11/site-packages/httpcore/_sync/socks_proxy.py create mode 100644 lib/python3.11/site-packages/httpcore/_synchronization.py create mode 100644 lib/python3.11/site-packages/httpcore/_trace.py create mode 100644 lib/python3.11/site-packages/httpcore/_utils.py create mode 100644 lib/python3.11/site-packages/httpcore/py.typed create mode 100644 lib/python3.11/site-packages/httpx-0.28.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/httpx-0.28.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/httpx-0.28.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/httpx-0.28.1.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/httpx-0.28.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/httpx-0.28.1.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md create mode 100644 lib/python3.11/site-packages/httpx/__init__.py create mode 100644 lib/python3.11/site-packages/httpx/__version__.py create mode 100644 lib/python3.11/site-packages/httpx/_api.py create mode 100644 lib/python3.11/site-packages/httpx/_auth.py create mode 100644 lib/python3.11/site-packages/httpx/_client.py create mode 100644 lib/python3.11/site-packages/httpx/_config.py create mode 100644 lib/python3.11/site-packages/httpx/_content.py create mode 100644 lib/python3.11/site-packages/httpx/_decoders.py create mode 100644 lib/python3.11/site-packages/httpx/_exceptions.py create mode 100644 lib/python3.11/site-packages/httpx/_main.py create mode 100644 
lib/python3.11/site-packages/httpx/_models.py create mode 100644 lib/python3.11/site-packages/httpx/_multipart.py create mode 100644 lib/python3.11/site-packages/httpx/_status_codes.py create mode 100644 lib/python3.11/site-packages/httpx/_transports/__init__.py create mode 100644 lib/python3.11/site-packages/httpx/_transports/asgi.py create mode 100644 lib/python3.11/site-packages/httpx/_transports/base.py create mode 100644 lib/python3.11/site-packages/httpx/_transports/default.py create mode 100644 lib/python3.11/site-packages/httpx/_transports/mock.py create mode 100644 lib/python3.11/site-packages/httpx/_transports/wsgi.py create mode 100644 lib/python3.11/site-packages/httpx/_types.py create mode 100644 lib/python3.11/site-packages/httpx/_urlparse.py create mode 100644 lib/python3.11/site-packages/httpx/_urls.py create mode 100644 lib/python3.11/site-packages/httpx/_utils.py create mode 100644 lib/python3.11/site-packages/httpx/py.typed create mode 100644 lib/python3.11/site-packages/idna-3.10.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/idna-3.10.dist-info/LICENSE.md create mode 100644 lib/python3.11/site-packages/idna-3.10.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/idna-3.10.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/idna-3.10.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/idna/__init__.py create mode 100644 lib/python3.11/site-packages/idna/codec.py create mode 100644 lib/python3.11/site-packages/idna/compat.py create mode 100644 lib/python3.11/site-packages/idna/core.py create mode 100644 lib/python3.11/site-packages/idna/idnadata.py create mode 100644 lib/python3.11/site-packages/idna/intranges.py create mode 100644 lib/python3.11/site-packages/idna/package_data.py create mode 100644 lib/python3.11/site-packages/idna/py.typed create mode 100644 lib/python3.11/site-packages/idna/uts46data.py create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/INSTALLER 
create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/LICENSE.txt create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/LICENSES.txt create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/lxml-5.3.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/lxml/ElementInclude.py create mode 100644 lib/python3.11/site-packages/lxml/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/_elementpath.py create mode 100644 lib/python3.11/site-packages/lxml/apihelpers.pxi create mode 100644 lib/python3.11/site-packages/lxml/builder.py create mode 100644 lib/python3.11/site-packages/lxml/classlookup.pxi create mode 100644 lib/python3.11/site-packages/lxml/cleanup.pxi create mode 100644 lib/python3.11/site-packages/lxml/cssselect.py create mode 100644 lib/python3.11/site-packages/lxml/debug.pxi create mode 100644 lib/python3.11/site-packages/lxml/docloader.pxi create mode 100644 lib/python3.11/site-packages/lxml/doctestcompare.py create mode 100644 lib/python3.11/site-packages/lxml/dtd.pxi create mode 100644 lib/python3.11/site-packages/lxml/etree.h create mode 100644 lib/python3.11/site-packages/lxml/etree.pyx create mode 100644 lib/python3.11/site-packages/lxml/etree_api.h create mode 100644 lib/python3.11/site-packages/lxml/extensions.pxi create mode 100644 lib/python3.11/site-packages/lxml/html/ElementSoup.py create mode 100644 lib/python3.11/site-packages/lxml/html/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/html/_diffcommand.py create mode 100644 lib/python3.11/site-packages/lxml/html/_html5builder.py create mode 100644 lib/python3.11/site-packages/lxml/html/_setmixin.py create mode 100644 
lib/python3.11/site-packages/lxml/html/builder.py create mode 100644 lib/python3.11/site-packages/lxml/html/clean.py create mode 100644 lib/python3.11/site-packages/lxml/html/defs.py create mode 100644 lib/python3.11/site-packages/lxml/html/diff.py create mode 100644 lib/python3.11/site-packages/lxml/html/formfill.py create mode 100644 lib/python3.11/site-packages/lxml/html/html5parser.py create mode 100644 lib/python3.11/site-packages/lxml/html/soupparser.py create mode 100644 lib/python3.11/site-packages/lxml/html/usedoctest.py create mode 100644 lib/python3.11/site-packages/lxml/includes/__init__.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/includes/c14n.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/config.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/dtdvalid.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/etree_defs.h create mode 100644 lib/python3.11/site-packages/lxml/includes/etreepublic.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/extlibs/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/includes/extlibs/libcharset.h create mode 100644 lib/python3.11/site-packages/lxml/includes/extlibs/localcharset.h create mode 100644 lib/python3.11/site-packages/lxml/includes/extlibs/zconf.h create mode 100644 lib/python3.11/site-packages/lxml/includes/extlibs/zlib.h create mode 100644 lib/python3.11/site-packages/lxml/includes/htmlparser.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/libexslt/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/includes/libexslt/exslt.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libexslt/exsltconfig.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libexslt/exsltexports.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/HTMLparser.h create mode 100644 
lib/python3.11/site-packages/lxml/includes/libxml/HTMLtree.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/SAX.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/SAX2.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/c14n.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/catalog.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/chvalid.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/debugXML.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/dict.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/encoding.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/entities.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/globals.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/hash.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/list.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/nanoftp.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/nanohttp.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/parser.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/parserInternals.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/relaxng.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/schemasInternals.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/schematron.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/threads.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/tree.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/uri.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/valid.h create mode 100644 
lib/python3.11/site-packages/lxml/includes/libxml/xinclude.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xlink.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlIO.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlautomata.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlerror.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlexports.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlmemory.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlmodule.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlreader.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlregexp.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlsave.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlschemas.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlschemastypes.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlstring.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlunicode.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlversion.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xmlwriter.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xpath.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xpathInternals.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxml/xpointer.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/attributes.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/documents.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/extensions.h create mode 100644 
lib/python3.11/site-packages/lxml/includes/libxslt/extra.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/functions.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/imports.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/keys.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/namespaces.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/numbersInternals.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/pattern.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/preproc.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/security.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/templates.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/transform.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/variables.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/xslt.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/xsltInternals.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/xsltconfig.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/xsltexports.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/xsltlocale.h create mode 100644 lib/python3.11/site-packages/lxml/includes/libxslt/xsltutils.h create mode 100644 lib/python3.11/site-packages/lxml/includes/lxml-version.h create mode 100644 lib/python3.11/site-packages/lxml/includes/relaxng.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/schematron.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/tree.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/uri.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/xinclude.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/xmlerror.pxd create mode 100644 
lib/python3.11/site-packages/lxml/includes/xmlparser.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/xmlschema.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/xpath.pxd create mode 100644 lib/python3.11/site-packages/lxml/includes/xslt.pxd create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/__init__.py create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/rng/iso-schematron.rng create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/RNG2Schtrn.xsl create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/XSD2Schtrn.xsl create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_abstract_expand.xsl create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_dsdl_include.xsl create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_message.xsl create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_skeleton_for_xslt1.xsl create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_svrl_for_xslt1.xsl create mode 100644 lib/python3.11/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/readme.txt create mode 100644 lib/python3.11/site-packages/lxml/iterparse.pxi create mode 100644 lib/python3.11/site-packages/lxml/lxml.etree.h create mode 100644 lib/python3.11/site-packages/lxml/lxml.etree_api.h create mode 100644 lib/python3.11/site-packages/lxml/nsclasses.pxi create mode 100644 lib/python3.11/site-packages/lxml/objectify.pyx create mode 100644 lib/python3.11/site-packages/lxml/objectpath.pxi create mode 100644 lib/python3.11/site-packages/lxml/parser.pxi create mode 100644 lib/python3.11/site-packages/lxml/parsertarget.pxi create mode 100644 
lib/python3.11/site-packages/lxml/proxy.pxi create mode 100644 lib/python3.11/site-packages/lxml/public-api.pxi create mode 100644 lib/python3.11/site-packages/lxml/pyclasslookup.py create mode 100644 lib/python3.11/site-packages/lxml/readonlytree.pxi create mode 100644 lib/python3.11/site-packages/lxml/relaxng.pxi create mode 100644 lib/python3.11/site-packages/lxml/sax.py create mode 100644 lib/python3.11/site-packages/lxml/saxparser.pxi create mode 100644 lib/python3.11/site-packages/lxml/schematron.pxi create mode 100644 lib/python3.11/site-packages/lxml/serializer.pxi create mode 100644 lib/python3.11/site-packages/lxml/usedoctest.py create mode 100644 lib/python3.11/site-packages/lxml/xinclude.pxi create mode 100644 lib/python3.11/site-packages/lxml/xmlerror.pxi create mode 100644 lib/python3.11/site-packages/lxml/xmlid.pxi create mode 100644 lib/python3.11/site-packages/lxml/xmlschema.pxi create mode 100644 lib/python3.11/site-packages/lxml/xpath.pxi create mode 100644 lib/python3.11/site-packages/lxml/xslt.pxi create mode 100644 lib/python3.11/site-packages/lxml/xsltext.pxi create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/AUTHORS.txt create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/LICENSE.txt create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/pip-24.3.1.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/pip-24.3.1.virtualenv create mode 100644 lib/python3.11/site-packages/pip/__init__.py create mode 100644 lib/python3.11/site-packages/pip/__main__.py create mode 100644 
lib/python3.11/site-packages/pip/__pip-runner__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/build_env.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cache.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/base_command.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/command_context.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/index_command.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/main.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/main_parser.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/parser.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/progress_bars.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/req_command.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/spinners.py create mode 100644 lib/python3.11/site-packages/pip/_internal/cli/status_codes.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/cache.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/check.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/completion.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/configuration.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/debug.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/download.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/freeze.py create mode 100644 
lib/python3.11/site-packages/pip/_internal/commands/hash.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/help.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/index.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/inspect.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/install.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/list.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/search.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/show.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/uninstall.py create mode 100644 lib/python3.11/site-packages/pip/_internal/commands/wheel.py create mode 100644 lib/python3.11/site-packages/pip/_internal/configuration.py create mode 100644 lib/python3.11/site-packages/pip/_internal/distributions/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/distributions/base.py create mode 100644 lib/python3.11/site-packages/pip/_internal/distributions/installed.py create mode 100644 lib/python3.11/site-packages/pip/_internal/distributions/sdist.py create mode 100644 lib/python3.11/site-packages/pip/_internal/distributions/wheel.py create mode 100644 lib/python3.11/site-packages/pip/_internal/exceptions.py create mode 100644 lib/python3.11/site-packages/pip/_internal/index/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/index/collector.py create mode 100644 lib/python3.11/site-packages/pip/_internal/index/package_finder.py create mode 100644 lib/python3.11/site-packages/pip/_internal/index/sources.py create mode 100644 lib/python3.11/site-packages/pip/_internal/locations/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/locations/_distutils.py create mode 100644 lib/python3.11/site-packages/pip/_internal/locations/_sysconfig.py create mode 100644 
lib/python3.11/site-packages/pip/_internal/locations/base.py create mode 100644 lib/python3.11/site-packages/pip/_internal/main.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/_json.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/base.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/importlib/_compat.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/importlib/_dists.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/importlib/_envs.py create mode 100644 lib/python3.11/site-packages/pip/_internal/metadata/pkg_resources.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/candidate.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/direct_url.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/format_control.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/index.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/installation_report.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/link.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/scheme.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/search_scope.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/selection_prefs.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/target_python.py create mode 100644 lib/python3.11/site-packages/pip/_internal/models/wheel.py create mode 100644 lib/python3.11/site-packages/pip/_internal/network/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/network/auth.py create mode 
100644 lib/python3.11/site-packages/pip/_internal/network/cache.py create mode 100644 lib/python3.11/site-packages/pip/_internal/network/download.py create mode 100644 lib/python3.11/site-packages/pip/_internal/network/lazy_wheel.py create mode 100644 lib/python3.11/site-packages/pip/_internal/network/session.py create mode 100644 lib/python3.11/site-packages/pip/_internal/network/utils.py create mode 100644 lib/python3.11/site-packages/pip/_internal/network/xmlrpc.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/build_tracker.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/metadata.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/metadata_editable.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/metadata_legacy.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/wheel.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/wheel_editable.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/build/wheel_legacy.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/check.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/freeze.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/install/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/install/editable_legacy.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/install/wheel.py create mode 100644 lib/python3.11/site-packages/pip/_internal/operations/prepare.py create mode 100644 lib/python3.11/site-packages/pip/_internal/pyproject.py create mode 100644 lib/python3.11/site-packages/pip/_internal/req/__init__.py 
create mode 100644 lib/python3.11/site-packages/pip/_internal/req/constructors.py create mode 100644 lib/python3.11/site-packages/pip/_internal/req/req_file.py create mode 100644 lib/python3.11/site-packages/pip/_internal/req/req_install.py create mode 100644 lib/python3.11/site-packages/pip/_internal/req/req_set.py create mode 100644 lib/python3.11/site-packages/pip/_internal/req/req_uninstall.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/base.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py create mode 100644 lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py create mode 100644 lib/python3.11/site-packages/pip/_internal/self_outdated_check.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/_jaraco_text.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/_log.py create mode 100644 
lib/python3.11/site-packages/pip/_internal/utils/appdirs.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/compat.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/compatibility_tags.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/datetime.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/deprecation.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/direct_url_helpers.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/egg_link.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/encoding.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/entrypoints.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/filesystem.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/filetypes.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/glibc.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/hashes.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/logging.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/misc.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/packaging.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/retry.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/setuptools_build.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/subprocess.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/unpacking.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/urls.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py create mode 100644 lib/python3.11/site-packages/pip/_internal/utils/wheel.py create mode 100644 lib/python3.11/site-packages/pip/_internal/vcs/__init__.py 
create mode 100644 lib/python3.11/site-packages/pip/_internal/vcs/bazaar.py create mode 100644 lib/python3.11/site-packages/pip/_internal/vcs/git.py create mode 100644 lib/python3.11/site-packages/pip/_internal/vcs/mercurial.py create mode 100644 lib/python3.11/site-packages/pip/_internal/vcs/subversion.py create mode 100644 lib/python3.11/site-packages/pip/_internal/vcs/versioncontrol.py create mode 100644 lib/python3.11/site-packages/pip/_internal/wheel_builder.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/_cmd.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/adapter.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/cache.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/controller.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/heuristics.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/serialize.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/cachecontrol/wrapper.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/certifi/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/certifi/__main__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/certifi/cacert.pem create mode 100644 lib/python3.11/site-packages/pip/_vendor/certifi/core.py create mode 
100644 lib/python3.11/site-packages/pip/_vendor/certifi/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/compat.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/database.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/index.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/locators.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/manifest.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/markers.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/metadata.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/resources.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/scripts.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/util.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/version.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distlib/wheel.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distro/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distro/__main__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distro/distro.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/distro/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/codec.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/compat.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/core.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/idnadata.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/intranges.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/package_data.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/idna/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/idna/uts46data.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/msgpack/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/msgpack/exceptions.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/msgpack/ext.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/msgpack/fallback.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/_elffile.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/_manylinux.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/_musllinux.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/_parser.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/_structures.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/_tokenizer.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/markers.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/metadata.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/requirements.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/specifiers.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/tags.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/utils.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/packaging/version.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pkg_resources/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/__main__.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/platformdirs/android.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/api.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/macos.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/unix.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/version.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/platformdirs/windows.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/__main__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/cmdline.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/console.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/filter.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/filters/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatter.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/_mapping.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/bbcode.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/groff.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/html.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/img.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/irc.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/latex.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/other.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/pygments/formatters/pangomarkup.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/rtf.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/svg.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/terminal.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/formatters/terminal256.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/lexer.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/lexers/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/lexers/_mapping.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/lexers/python.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/modeline.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/plugin.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/regexopt.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/scanner.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/sphinxext.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/style.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/styles/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/styles/_mapping.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/token.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/unistring.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pygments/util.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_compat.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_impl.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/pyproject_hooks/_in_process/_in_process.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/__version__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/_internal_utils.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/adapters.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/api.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/auth.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/certs.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/compat.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/cookies.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/exceptions.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/help.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/hooks.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/models.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/packages.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/sessions.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/status_codes.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/structures.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/requests/utils.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/providers.py create mode 
100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/reporters.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/resolvers.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/resolvelib/structs.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/__main__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_cell_widths.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_emoji_codes.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_emoji_replace.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_export_format.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_extension.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_fileno.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_inspect.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_log_render.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_loop.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_null_file.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_palettes.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_pick.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_ratio.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_spinners.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_stack.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_timer.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_win32_console.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_windows.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/_windows_renderer.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/rich/_wrap.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/abc.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/align.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/ansi.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/bar.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/box.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/cells.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/color.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/color_triplet.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/columns.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/console.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/constrain.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/containers.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/control.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/default_styles.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/diagnose.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/emoji.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/errors.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/file_proxy.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/filesize.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/highlighter.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/json.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/jupyter.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/layout.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/live.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/live_render.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/rich/logging.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/markup.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/measure.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/padding.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/pager.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/palette.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/panel.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/pretty.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/progress.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/progress_bar.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/prompt.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/protocol.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/region.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/repr.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/rule.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/scope.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/screen.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/segment.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/spinner.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/status.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/style.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/styled.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/syntax.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/table.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/terminal_theme.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/rich/text.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/theme.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/themes.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/traceback.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/rich/tree.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/tomli/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/tomli/_parser.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/tomli/_re.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/tomli/_types.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/tomli/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/truststore/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/truststore/_api.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/truststore/_macos.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/truststore/_openssl.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/truststore/_ssl_constants.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/truststore/_windows.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/truststore/py.typed create mode 100644 lib/python3.11/site-packages/pip/_vendor/typing_extensions.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/_collections.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/_version.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/connection.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/connectionpool.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py create 
mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/appengine.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/securetransport.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/contrib/socks.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/exceptions.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/fields.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/filepost.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/packages/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/packages/backports/weakref_finalize.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/packages/six.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/poolmanager.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/request.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/response.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/__init__.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/connection.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/proxy.py create mode 100644 
lib/python3.11/site-packages/pip/_vendor/urllib3/util/queue.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/request.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/response.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/retry.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/ssl_.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/ssl_match_hostname.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/ssltransport.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/timeout.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/url.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/urllib3/util/wait.py create mode 100644 lib/python3.11/site-packages/pip/_vendor/vendor.txt create mode 100644 lib/python3.11/site-packages/pip/py.typed create mode 100644 lib/python3.11/site-packages/pkg_resources/__init__.py create mode 100644 lib/python3.11/site-packages/pkg_resources/api_tests.txt create mode 100644 lib/python3.11/site-packages/pkg_resources/py.typed create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/__init__.py create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package-source/setup.cfg create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt create mode 100644 
lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/test_find_distributions.py create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/test_integration_zope_interface.py create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/test_markers.py create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/test_pkg_resources.py create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/test_resources.py create mode 100644 lib/python3.11/site-packages/pkg_resources/tests/test_working_set.py create mode 100644 lib/python3.11/site-packages/pyotp-2.9.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/pyotp-2.9.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/pyotp-2.9.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/pyotp-2.9.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/pyotp-2.9.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/pyotp-2.9.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/pyotp-2.9.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/pyotp/__init__.py create mode 100644 lib/python3.11/site-packages/pyotp/compat.py create mode 100644 lib/python3.11/site-packages/pyotp/contrib/__init__.py create mode 100644 lib/python3.11/site-packages/pyotp/contrib/steam.py create mode 100644 lib/python3.11/site-packages/pyotp/hotp.py create mode 100644 lib/python3.11/site-packages/pyotp/otp.py create mode 100644 lib/python3.11/site-packages/pyotp/py.typed create mode 100644 
lib/python3.11/site-packages/pyotp/totp.py create mode 100644 lib/python3.11/site-packages/pyotp/utils.py create mode 100644 lib/python3.11/site-packages/requests-2.32.3.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/requests-2.32.3.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/requests-2.32.3.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/requests-2.32.3.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/requests-2.32.3.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/requests-2.32.3.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/requests-2.32.3.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/requests/__init__.py create mode 100644 lib/python3.11/site-packages/requests/__version__.py create mode 100644 lib/python3.11/site-packages/requests/_internal_utils.py create mode 100644 lib/python3.11/site-packages/requests/adapters.py create mode 100644 lib/python3.11/site-packages/requests/api.py create mode 100644 lib/python3.11/site-packages/requests/auth.py create mode 100644 lib/python3.11/site-packages/requests/certs.py create mode 100644 lib/python3.11/site-packages/requests/compat.py create mode 100644 lib/python3.11/site-packages/requests/cookies.py create mode 100644 lib/python3.11/site-packages/requests/exceptions.py create mode 100644 lib/python3.11/site-packages/requests/help.py create mode 100644 lib/python3.11/site-packages/requests/hooks.py create mode 100644 lib/python3.11/site-packages/requests/models.py create mode 100644 lib/python3.11/site-packages/requests/packages.py create mode 100644 lib/python3.11/site-packages/requests/sessions.py create mode 100644 lib/python3.11/site-packages/requests/status_codes.py create mode 100644 lib/python3.11/site-packages/requests/structures.py create mode 100644 lib/python3.11/site-packages/requests/utils.py create mode 100644 
lib/python3.11/site-packages/setuptools-75.6.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools-75.6.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools-75.6.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools-75.6.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools-75.6.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools-75.6.0.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/setuptools-75.6.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools-75.6.0.virtualenv create mode 100644 lib/python3.11/site-packages/setuptools/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_core_metadata.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/_log.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/_macos_compat.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/_modified.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/_msvccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/archive_util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/ccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/cmd.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/_framework_compat.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/bdist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/bdist_dumb.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/bdist_rpm.py create mode 100644 
lib/python3.11/site-packages/setuptools/_distutils/command/build.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/build_clib.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/build_ext.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/build_py.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/build_scripts.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/check.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/clean.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/config.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/install.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/install_data.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/install_egg_info.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/install_headers.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/install_lib.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/install_scripts.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/command/sdist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/compat/py38.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/compat/py39.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/core.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/cygwinccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/debug.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/dep_util.py create mode 100644 
lib/python3.11/site-packages/setuptools/_distutils/dir_util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/dist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/errors.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/extension.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/fancy_getopt.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/file_util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/filelist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/log.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/spawn.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/sysconfig.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/compat/py38.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/support.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_archive_util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_bdist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_bdist_dumb.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_bdist_rpm.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_build.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_build_clib.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_build_ext.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_build_py.py create mode 100644 
lib/python3.11/site-packages/setuptools/_distutils/tests/test_build_scripts.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_ccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_check.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_clean.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_cmd.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_config_cmd.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_core.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_cygwinccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_dir_util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_dist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_extension.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_file_util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_filelist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_install.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_install_data.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_install_headers.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_install_lib.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_install_scripts.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_log.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_mingwccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_modified.py create mode 100644 
lib/python3.11/site-packages/setuptools/_distutils/tests/test_msvccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_sdist.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_spawn.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_sysconfig.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_text_file.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_unixccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_version.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/test_versionpredicate.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/tests/unix_compat.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/text_file.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/unixccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/util.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/version.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/versionpredicate.py create mode 100644 lib/python3.11/site-packages/setuptools/_distutils/zosccompiler.py create mode 100644 lib/python3.11/site-packages/setuptools/_entry_points.py create mode 100644 lib/python3.11/site-packages/setuptools/_imp.py create mode 100644 lib/python3.11/site-packages/setuptools/_importlib.py create mode 100644 lib/python3.11/site-packages/setuptools/_itertools.py create mode 100644 lib/python3.11/site-packages/setuptools/_normalization.py create mode 100644 lib/python3.11/site-packages/setuptools/_path.py create mode 100644 lib/python3.11/site-packages/setuptools/_reqs.py create mode 100644 lib/python3.11/site-packages/setuptools/_shutil.py 
create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand/autoasync.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand/autocommand.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand/automain.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand/autoparse.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/autocommand/errors.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports.tarfile-1.2.0.dist-info/top_level.txt create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/backports/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports/tarfile/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports/tarfile/__main__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports/tarfile/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/backports/tarfile/compat/py38.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata-8.0.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/_adapters.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/_collections.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/_compat.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/_functools.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/_itertools.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/_meta.py create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/_text.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/compat/py311.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/compat/py39.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/diagnose.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/importlib_metadata/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect-7.3.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect-7.3.1.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect-7.3.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect-7.3.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect-7.3.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect-7.3.1.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect/compat/py38.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/inflect/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.functools-4.0.1.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.text-3.12.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.text-3.12.1.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.text-3.12.1.dist-info/METADATA create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/jaraco.text-3.12.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.text-3.12.1.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.text-3.12.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco.text-3.12.1.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/collections/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/collections/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/context.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/functools/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/functools/__init__.pyi create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/functools/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/Lorem ipsum.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/layouts.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/show-newlines.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/strip-prefix.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/to-dvorak.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/to-qwerty.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/METADATA create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools/__init__.pyi create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools/more.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools/more.pyi create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools/recipes.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/more_itertools/recipes.pyi create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging-24.2.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/_elffile.py create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/packaging/_manylinux.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/_musllinux.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/_parser.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/_structures.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/_tokenizer.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/licenses/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/licenses/_spdx.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/markers.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/metadata.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/requirements.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/specifiers.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/tags.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/utils.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/packaging/version.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs-4.2.2.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs-4.2.2.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs-4.2.2.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs-4.2.2.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs-4.2.2.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs-4.2.2.dist-info/licenses/LICENSE create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/platformdirs/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/__main__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/android.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/api.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/macos.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/unix.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/version.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/platformdirs/windows.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/ruff.toml create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli/_parser.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli/_re.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli/_types.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/tomli/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard-4.3.0.dist-info/INSTALLER create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/typeguard-4.3.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard-4.3.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard-4.3.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard-4.3.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard-4.3.0.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard-4.3.0.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_checkers.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_config.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_decorators.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_exceptions.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_functions.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_importhook.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_memo.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_pytest_plugin.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_suppression.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_transformer.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_union_transformer.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/_utils.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typeguard/py.typed create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typing_extensions-4.12.2.dist-info/INSTALLER create mode 
100644 lib/python3.11/site-packages/setuptools/_vendor/typing_extensions-4.12.2.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typing_extensions-4.12.2.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typing_extensions-4.12.2.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typing_extensions-4.12.2.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/typing_extensions.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel-0.43.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel-0.43.0.dist-info/LICENSE.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel-0.43.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel-0.43.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel-0.43.0.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel-0.43.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel-0.43.0.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/__main__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/_setuptools_logging.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/bdist_wheel.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/cli/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/cli/convert.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/cli/pack.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/cli/tags.py create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/wheel/cli/unpack.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/macosx_libfile.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/metadata.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/util.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/_elffile.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/_parser.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/_structures.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/markers.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/requirements.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/specifiers.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/tags.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/utils.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/packaging/version.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/vendored/vendor.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/wheel/wheelfile.py create mode 100644 
lib/python3.11/site-packages/setuptools/_vendor/zipp-3.19.2.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp-3.19.2.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp-3.19.2.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp-3.19.2.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp-3.19.2.dist-info/REQUESTED create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp-3.19.2.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp-3.19.2.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp/compat/py310.py create mode 100644 lib/python3.11/site-packages/setuptools/_vendor/zipp/glob.py create mode 100644 lib/python3.11/site-packages/setuptools/archive_util.py create mode 100644 lib/python3.11/site-packages/setuptools/build_meta.py create mode 100644 lib/python3.11/site-packages/setuptools/command/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/command/_requirestxt.py create mode 100644 lib/python3.11/site-packages/setuptools/command/alias.py create mode 100644 lib/python3.11/site-packages/setuptools/command/bdist_egg.py create mode 100644 lib/python3.11/site-packages/setuptools/command/bdist_rpm.py create mode 100644 lib/python3.11/site-packages/setuptools/command/bdist_wheel.py create mode 100644 lib/python3.11/site-packages/setuptools/command/build.py create mode 100644 lib/python3.11/site-packages/setuptools/command/build_clib.py create mode 100644 lib/python3.11/site-packages/setuptools/command/build_ext.py create mode 100644 lib/python3.11/site-packages/setuptools/command/build_py.py create mode 100644 
lib/python3.11/site-packages/setuptools/command/develop.py create mode 100644 lib/python3.11/site-packages/setuptools/command/dist_info.py create mode 100644 lib/python3.11/site-packages/setuptools/command/easy_install.py create mode 100644 lib/python3.11/site-packages/setuptools/command/editable_wheel.py create mode 100644 lib/python3.11/site-packages/setuptools/command/egg_info.py create mode 100644 lib/python3.11/site-packages/setuptools/command/install.py create mode 100644 lib/python3.11/site-packages/setuptools/command/install_egg_info.py create mode 100644 lib/python3.11/site-packages/setuptools/command/install_lib.py create mode 100644 lib/python3.11/site-packages/setuptools/command/install_scripts.py create mode 100644 lib/python3.11/site-packages/setuptools/command/launcher manifest.xml create mode 100644 lib/python3.11/site-packages/setuptools/command/rotate.py create mode 100644 lib/python3.11/site-packages/setuptools/command/saveopts.py create mode 100644 lib/python3.11/site-packages/setuptools/command/sdist.py create mode 100644 lib/python3.11/site-packages/setuptools/command/setopt.py create mode 100644 lib/python3.11/site-packages/setuptools/command/test.py create mode 100644 lib/python3.11/site-packages/setuptools/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/compat/py310.py create mode 100644 lib/python3.11/site-packages/setuptools/compat/py311.py create mode 100644 lib/python3.11/site-packages/setuptools/compat/py312.py create mode 100644 lib/python3.11/site-packages/setuptools/compat/py39.py create mode 100644 lib/python3.11/site-packages/setuptools/config/NOTICE create mode 100644 lib/python3.11/site-packages/setuptools/config/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/config/_apply_pyprojecttoml.py create mode 100644 lib/python3.11/site-packages/setuptools/config/_validate_pyproject/NOTICE create mode 100644 
lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/config/_validate_pyproject/error_reporting.py create mode 100644 lib/python3.11/site-packages/setuptools/config/_validate_pyproject/extra_validations.py create mode 100644 lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py create mode 100644 lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py create mode 100644 lib/python3.11/site-packages/setuptools/config/_validate_pyproject/formats.py create mode 100644 lib/python3.11/site-packages/setuptools/config/distutils.schema.json create mode 100644 lib/python3.11/site-packages/setuptools/config/expand.py create mode 100644 lib/python3.11/site-packages/setuptools/config/pyprojecttoml.py create mode 100644 lib/python3.11/site-packages/setuptools/config/setupcfg.py create mode 100644 lib/python3.11/site-packages/setuptools/config/setuptools.schema.json create mode 100644 lib/python3.11/site-packages/setuptools/depends.py create mode 100644 lib/python3.11/site-packages/setuptools/discovery.py create mode 100644 lib/python3.11/site-packages/setuptools/dist.py create mode 100644 lib/python3.11/site-packages/setuptools/errors.py create mode 100644 lib/python3.11/site-packages/setuptools/extension.py create mode 100644 lib/python3.11/site-packages/setuptools/glob.py create mode 100644 lib/python3.11/site-packages/setuptools/installer.py create mode 100644 lib/python3.11/site-packages/setuptools/launch.py create mode 100644 lib/python3.11/site-packages/setuptools/logging.py create mode 100644 lib/python3.11/site-packages/setuptools/modified.py create mode 100644 lib/python3.11/site-packages/setuptools/monkey.py create mode 100644 lib/python3.11/site-packages/setuptools/msvc.py create mode 100644 lib/python3.11/site-packages/setuptools/namespaces.py create mode 100644 
lib/python3.11/site-packages/setuptools/package_index.py create mode 100644 lib/python3.11/site-packages/setuptools/sandbox.py create mode 100644 lib/python3.11/site-packages/setuptools/script (dev).tmpl create mode 100644 lib/python3.11/site-packages/setuptools/script.tmpl create mode 100644 lib/python3.11/site-packages/setuptools/tests/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/compat/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/compat/py39.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/downloads/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/downloads/preload.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/setupcfg_examples.txt create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/test_apply_pyprojecttoml.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/test_expand.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/test_pyprojecttoml.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/test_pyprojecttoml_dynamic_deps.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/config/test_setupcfg.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/contexts.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/environment.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/fixtures.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/indexes/test_links_priority/external.html create mode 100644 lib/python3.11/site-packages/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html create mode 100644 lib/python3.11/site-packages/setuptools/tests/integration/__init__.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/integration/helpers.py 
create mode 100644 lib/python3.11/site-packages/setuptools/tests/integration/test_pip_install_sdist.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/mod_with_constant.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/namespaces.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/script-with-bom.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/server.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_archive_util.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_bdist_deprecations.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_bdist_egg.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_bdist_wheel.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_build.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_build_clib.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_build_ext.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_build_meta.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_build_py.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_config_discovery.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_core_metadata.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_depends.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_develop.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_dist.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_dist_info.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_distutils_adoption.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_easy_install.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_editable_install.py create mode 100644 
lib/python3.11/site-packages/setuptools/tests/test_egg_info.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_extern.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_find_packages.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_find_py_modules.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_glob.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_install_scripts.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_logging.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_manifest.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_namespaces.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_packageindex.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_sandbox.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_sdist.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_setopt.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_setuptools.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_shutil_wrapper.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_unicode_utils.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_virtualenv.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_warnings.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_wheel.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/test_windows_wrappers.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/text.py create mode 100644 lib/python3.11/site-packages/setuptools/tests/textwrap.py create mode 100644 lib/python3.11/site-packages/setuptools/unicode_utils.py create mode 100644 lib/python3.11/site-packages/setuptools/version.py create mode 100644 
lib/python3.11/site-packages/setuptools/warnings.py create mode 100644 lib/python3.11/site-packages/setuptools/wheel.py create mode 100644 lib/python3.11/site-packages/setuptools/windows_support.py create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/LICENSE.APACHE2 create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/LICENSE.MIT create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/sniffio-1.3.1.dist-info/top_level.txt create mode 100644 lib/python3.11/site-packages/sniffio/__init__.py create mode 100644 lib/python3.11/site-packages/sniffio/_impl.py create mode 100644 lib/python3.11/site-packages/sniffio/_tests/__init__.py create mode 100644 lib/python3.11/site-packages/sniffio/_tests/test_sniffio.py create mode 100644 lib/python3.11/site-packages/sniffio/_version.py create mode 100644 lib/python3.11/site-packages/sniffio/py.typed create mode 100644 lib/python3.11/site-packages/socksio-1.0.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/socksio-1.0.0.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/socksio-1.0.0.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/socksio-1.0.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/socksio-1.0.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/socksio/__init__.py create mode 100644 lib/python3.11/site-packages/socksio/_types.py create mode 100644 lib/python3.11/site-packages/socksio/compat.py create mode 100644 lib/python3.11/site-packages/socksio/exceptions.py create mode 100644 
lib/python3.11/site-packages/socksio/py.typed create mode 100644 lib/python3.11/site-packages/socksio/socks4.py create mode 100644 lib/python3.11/site-packages/socksio/socks5.py create mode 100644 lib/python3.11/site-packages/socksio/utils.py create mode 100644 lib/python3.11/site-packages/soupsieve-2.6.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/soupsieve-2.6.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/soupsieve-2.6.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/soupsieve-2.6.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/soupsieve-2.6.dist-info/licenses/LICENSE.md create mode 100644 lib/python3.11/site-packages/soupsieve/__init__.py create mode 100644 lib/python3.11/site-packages/soupsieve/__meta__.py create mode 100644 lib/python3.11/site-packages/soupsieve/css_match.py create mode 100644 lib/python3.11/site-packages/soupsieve/css_parser.py create mode 100644 lib/python3.11/site-packages/soupsieve/css_types.py create mode 100644 lib/python3.11/site-packages/soupsieve/pretty.py create mode 100644 lib/python3.11/site-packages/soupsieve/py.typed create mode 100644 lib/python3.11/site-packages/soupsieve/util.py create mode 100644 lib/python3.11/site-packages/twikit.egg-link create mode 100644 lib/python3.11/site-packages/typing_extensions-4.12.2.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/typing_extensions-4.12.2.dist-info/LICENSE create mode 100644 lib/python3.11/site-packages/typing_extensions-4.12.2.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/typing_extensions-4.12.2.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/typing_extensions-4.12.2.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/typing_extensions.py create mode 100644 lib/python3.11/site-packages/urllib3-2.3.0.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/urllib3-2.3.0.dist-info/METADATA create mode 100644 
lib/python3.11/site-packages/urllib3-2.3.0.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/urllib3-2.3.0.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/urllib3-2.3.0.dist-info/licenses/LICENSE.txt create mode 100644 lib/python3.11/site-packages/urllib3/__init__.py create mode 100644 lib/python3.11/site-packages/urllib3/_base_connection.py create mode 100644 lib/python3.11/site-packages/urllib3/_collections.py create mode 100644 lib/python3.11/site-packages/urllib3/_request_methods.py create mode 100644 lib/python3.11/site-packages/urllib3/_version.py create mode 100644 lib/python3.11/site-packages/urllib3/connection.py create mode 100644 lib/python3.11/site-packages/urllib3/connectionpool.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/__init__.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/emscripten/__init__.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/emscripten/connection.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/emscripten/emscripten_fetch_worker.js create mode 100644 lib/python3.11/site-packages/urllib3/contrib/emscripten/fetch.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/emscripten/request.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/emscripten/response.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/pyopenssl.py create mode 100644 lib/python3.11/site-packages/urllib3/contrib/socks.py create mode 100644 lib/python3.11/site-packages/urllib3/exceptions.py create mode 100644 lib/python3.11/site-packages/urllib3/fields.py create mode 100644 lib/python3.11/site-packages/urllib3/filepost.py create mode 100644 lib/python3.11/site-packages/urllib3/http2/__init__.py create mode 100644 lib/python3.11/site-packages/urllib3/http2/connection.py create mode 100644 lib/python3.11/site-packages/urllib3/http2/probe.py create mode 100644 lib/python3.11/site-packages/urllib3/poolmanager.py create 
mode 100644 lib/python3.11/site-packages/urllib3/py.typed create mode 100644 lib/python3.11/site-packages/urllib3/response.py create mode 100644 lib/python3.11/site-packages/urllib3/util/__init__.py create mode 100644 lib/python3.11/site-packages/urllib3/util/connection.py create mode 100644 lib/python3.11/site-packages/urllib3/util/proxy.py create mode 100644 lib/python3.11/site-packages/urllib3/util/request.py create mode 100644 lib/python3.11/site-packages/urllib3/util/response.py create mode 100644 lib/python3.11/site-packages/urllib3/util/retry.py create mode 100644 lib/python3.11/site-packages/urllib3/util/ssl_.py create mode 100644 lib/python3.11/site-packages/urllib3/util/ssl_match_hostname.py create mode 100644 lib/python3.11/site-packages/urllib3/util/ssltransport.py create mode 100644 lib/python3.11/site-packages/urllib3/util/timeout.py create mode 100644 lib/python3.11/site-packages/urllib3/util/url.py create mode 100644 lib/python3.11/site-packages/urllib3/util/util.py create mode 100644 lib/python3.11/site-packages/urllib3/util/wait.py create mode 100644 lib/python3.11/site-packages/wheel-0.45.1.dist-info/INSTALLER create mode 100644 lib/python3.11/site-packages/wheel-0.45.1.dist-info/LICENSE.txt create mode 100644 lib/python3.11/site-packages/wheel-0.45.1.dist-info/METADATA create mode 100644 lib/python3.11/site-packages/wheel-0.45.1.dist-info/RECORD create mode 100644 lib/python3.11/site-packages/wheel-0.45.1.dist-info/WHEEL create mode 100644 lib/python3.11/site-packages/wheel-0.45.1.dist-info/entry_points.txt create mode 100644 lib/python3.11/site-packages/wheel-0.45.1.virtualenv create mode 100644 lib/python3.11/site-packages/wheel/__init__.py create mode 100644 lib/python3.11/site-packages/wheel/__main__.py create mode 100644 lib/python3.11/site-packages/wheel/_bdist_wheel.py create mode 100644 lib/python3.11/site-packages/wheel/_setuptools_logging.py create mode 100644 lib/python3.11/site-packages/wheel/bdist_wheel.py create mode 100644 
lib/python3.11/site-packages/wheel/cli/__init__.py create mode 100644 lib/python3.11/site-packages/wheel/cli/convert.py create mode 100644 lib/python3.11/site-packages/wheel/cli/pack.py create mode 100644 lib/python3.11/site-packages/wheel/cli/tags.py create mode 100644 lib/python3.11/site-packages/wheel/cli/unpack.py create mode 100644 lib/python3.11/site-packages/wheel/macosx_libfile.py create mode 100644 lib/python3.11/site-packages/wheel/metadata.py create mode 100644 lib/python3.11/site-packages/wheel/util.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/__init__.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/LICENSE create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/LICENSE.APACHE create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/LICENSE.BSD create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/__init__.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/_elffile.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/_manylinux.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/_musllinux.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/_parser.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/_structures.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/_tokenizer.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/markers.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/requirements.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/specifiers.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/tags.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/utils.py create mode 100644 lib/python3.11/site-packages/wheel/vendored/packaging/version.py create mode 100644 
lib/python3.11/site-packages/wheel/vendored/vendor.txt create mode 100644 lib/python3.11/site-packages/wheel/wheelfile.py create mode 100644 pyvenv.cfg create mode 100644 twikit.egg-info/PKG-INFO create mode 100644 twikit.egg-info/SOURCES.txt create mode 100644 twikit.egg-info/dependency_links.txt create mode 100644 twikit.egg-info/requires.txt create mode 100644 twikit.egg-info/top_level.txt create mode 100644 twikit/client/__init__.py diff --git a/bin/activate b/bin/activate new file mode 100644 index 00000000..95f86a87 --- /dev/null +++ b/bin/activate @@ -0,0 +1,87 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + + +if [ "${BASH_SOURCE-}" = "$0" ]; then + echo "You must source this script: \$ source $0" >&2 + exit 33 +fi + +deactivate () { + unset -f pydoc >/dev/null 2>&1 || true + + # reset old environment variables + # ! [ -z ${VAR+_} ] returns true if VAR is declared at all + if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then + PATH="$_OLD_VIRTUAL_PATH" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then + PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # The hash command must be called to get it to forget past + # commands. Without forgetting past commands the $PATH changes + # we made may not be respected + hash -r 2>/dev/null + + if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then + PS1="$_OLD_VIRTUAL_PS1" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1-}" = "nondestructive" ] ; then + # Self destruct! 
+ unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV=/Users/charlesholtzkampf/sentnl/twikit +if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then + VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV") +fi +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/"bin":$PATH" +export PATH + +if [ "x"'' != x ] ; then + VIRTUAL_ENV_PROMPT='' +else + VIRTUAL_ENV_PROMPT=$(basename "$VIRTUAL_ENV") +fi +export VIRTUAL_ENV_PROMPT + +# unset PYTHONHOME if set +if ! [ -z "${PYTHONHOME+_}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1-}" + PS1="(${VIRTUAL_ENV_PROMPT}) ${PS1-}" + export PS1 +fi + +# Make sure to unalias pydoc if it's already there +alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true + +pydoc () { + python -m pydoc "$@" +} + +# The hash command must be called to get it to forget past +# commands. Without forgetting past commands the $PATH changes +# we made may not be respected +hash -r 2>/dev/null || true diff --git a/bin/activate.csh b/bin/activate.csh new file mode 100644 index 00000000..c3fe63d4 --- /dev/null +++ b/bin/activate.csh @@ -0,0 +1,55 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . + +set newline='\ +' + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc' + +# Unset irrelevant variables. 
+deactivate nondestructive + +setenv VIRTUAL_ENV /Users/charlesholtzkampf/sentnl/twikit + +set _OLD_VIRTUAL_PATH="$PATH:q" +setenv PATH "$VIRTUAL_ENV:q/"bin":$PATH:q" + + + +if ('' != "") then + setenv VIRTUAL_ENV_PROMPT '' +else + setenv VIRTUAL_ENV_PROMPT "$VIRTUAL_ENV:t:q" +endif + +if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then + if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then + set do_prompt = "1" + else + set do_prompt = "0" + endif +else + set do_prompt = "1" +endif + +if ( $do_prompt == "1" ) then + # Could be in a non-interactive environment, + # in which case, $prompt is undefined and we wouldn't + # care about the prompt anyway. + if ( $?prompt ) then + set _OLD_VIRTUAL_PROMPT="$prompt:q" + if ( "$prompt:q" =~ *"$newline:q"* ) then + : + else + set prompt = '('"$VIRTUAL_ENV_PROMPT:q"') '"$prompt:q" + endif + endif +endif + +unset env_name +unset do_prompt + +alias pydoc python -m pydoc + +rehash diff --git a/bin/activate.fish b/bin/activate.fish new file mode 100644 index 00000000..8e6fc226 --- /dev/null +++ b/bin/activate.fish @@ -0,0 +1,103 @@ +# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*. +# Do not run it directly. + +function _bashify_path -d "Converts a fish path to something bash can recognize" + set fishy_path $argv + set bashy_path $fishy_path[1] + for path_part in $fishy_path[2..-1] + set bashy_path "$bashy_path:$path_part" + end + echo $bashy_path +end + +function _fishify_path -d "Converts a bash path to something fish can recognize" + echo $argv | tr ':' '\n' +end + +function deactivate -d 'Exit virtualenv mode and return to the normal environment.' 
+ # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling + if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH") + else + set -gx PATH $_OLD_VIRTUAL_PATH + end + set -e _OLD_VIRTUAL_PATH + end + + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME" + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + and functions -q _old_fish_prompt + # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`. + set -l fish_function_path + + # Erase virtualenv's `fish_prompt` and restore the original. + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + + if test "$argv[1]" != 'nondestructive' + # Self-destruct! + functions -e pydoc + functions -e deactivate + functions -e _bashify_path + functions -e _fishify_path + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV /Users/charlesholtzkampf/sentnl/twikit + +# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling +if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) +else + set -gx _OLD_VIRTUAL_PATH $PATH +end +set -gx PATH "$VIRTUAL_ENV"'/'bin $PATH + +# Prompt override provided? +# If not, just use the environment name. +if test -n '' + set -gx VIRTUAL_ENV_PROMPT '' +else + set -gx VIRTUAL_ENV_PROMPT (basename "$VIRTUAL_ENV") +end + +# Unset `$PYTHONHOME` if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +function pydoc + python -m pydoc $argv +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # Copy the current `fish_prompt` function as `_old_fish_prompt`. 
+ functions -c fish_prompt _old_fish_prompt + + function fish_prompt + # Run the user's prompt first; it might depend on (pipe)status. + set -l prompt (_old_fish_prompt) + + printf '(%s) ' $VIRTUAL_ENV_PROMPT + + string join -- \n $prompt # handle multi-line prompts + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/bin/activate.nu b/bin/activate.nu new file mode 100644 index 00000000..64d3b99e --- /dev/null +++ b/bin/activate.nu @@ -0,0 +1,96 @@ +# virtualenv activation module +# Activate with `overlay use activate.nu` +# Deactivate with `deactivate`, as usual +# +# To customize the overlay name, you can call `overlay use activate.nu as foo`, +# but then simply `deactivate` won't work because it is just an alias to hide +# the "activate" overlay. You'd need to call `overlay hide foo` manually. + +export-env { + def is-string [x] { + ($x | describe) == 'string' + } + + def has-env [...names] { + $names | each {|n| + $n in $env + } | all {|i| $i == true} + } + + # Emulates a `test -z`, but better as it handles e.g 'false' + def is-env-true [name: string] { + if (has-env $name) { + # Try to parse 'true', '0', '1', and fail if not convertible + let parsed = (do -i { $env | get $name | into bool }) + if ($parsed | describe) == 'bool' { + $parsed + } else { + not ($env | get -i $name | is-empty) + } + } else { + false + } + } + + let virtual_env = r#'/Users/charlesholtzkampf/sentnl/twikit'# + let bin = r#'bin'# + + let is_windows = ($nu.os-info.family) == 'windows' + let path_name = (if (has-env 'Path') { + 'Path' + } else { + 'PATH' + } + ) + + let venv_path = ([$virtual_env $bin] | path join) + let new_path = ($env | get $path_name | prepend $venv_path) + + # If there is no default prompt, then use the env name instead + let virtual_env_prompt = (if (r#''# | is-empty) { + ($virtual_env | path basename) + } else { + r#''# + }) + + let new_env = { + $path_name : $new_path + VIRTUAL_ENV : $virtual_env + VIRTUAL_ENV_PROMPT : $virtual_env_prompt + 
} + + let new_env = (if (is-env-true 'VIRTUAL_ENV_DISABLE_PROMPT') { + $new_env + } else { + # Creating the new prompt for the session + let virtual_prefix = $'(char lparen)($virtual_env_prompt)(char rparen) ' + + # Back up the old prompt builder + let old_prompt_command = (if (has-env 'PROMPT_COMMAND') { + $env.PROMPT_COMMAND + } else { + '' + }) + + let new_prompt = (if (has-env 'PROMPT_COMMAND') { + if 'closure' in ($old_prompt_command | describe) { + {|| $'($virtual_prefix)(do $old_prompt_command)' } + } else { + {|| $'($virtual_prefix)($old_prompt_command)' } + } + } else { + {|| $'($virtual_prefix)' } + }) + + $new_env | merge { + PROMPT_COMMAND : $new_prompt + VIRTUAL_PREFIX : $virtual_prefix + } + }) + + # Environment variables that will be loaded as the virtual env + load-env $new_env +} + +export alias pydoc = python -m pydoc +export alias deactivate = overlay hide activate diff --git a/bin/activate.ps1 b/bin/activate.ps1 new file mode 100644 index 00000000..3333e2b4 --- /dev/null +++ b/bin/activate.ps1 @@ -0,0 +1,61 @@ +$script:THIS_PATH = $myinvocation.mycommand.path +$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent + +function global:deactivate([switch] $NonDestructive) { + if (Test-Path variable:_OLD_VIRTUAL_PATH) { + $env:PATH = $variable:_OLD_VIRTUAL_PATH + Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global + } + + if (Test-Path function:_old_virtual_prompt) { + $function:prompt = $function:_old_virtual_prompt + Remove-Item function:\_old_virtual_prompt + } + + if ($env:VIRTUAL_ENV) { + Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue + } + + if ($env:VIRTUAL_ENV_PROMPT) { + Remove-Item env:VIRTUAL_ENV_PROMPT -ErrorAction SilentlyContinue + } + + if (!$NonDestructive) { + # Self destruct! 
+ Remove-Item function:deactivate + Remove-Item function:pydoc + } +} + +function global:pydoc { + python -m pydoc $args +} + +# unset irrelevant variables +deactivate -nondestructive + +$VIRTUAL_ENV = $BASE_DIR +$env:VIRTUAL_ENV = $VIRTUAL_ENV + +if ('' -ne "") { + $env:VIRTUAL_ENV_PROMPT = '' +} +else { + $env:VIRTUAL_ENV_PROMPT = $( Split-Path $env:VIRTUAL_ENV -Leaf ) +} + +New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH + +$env:PATH = "$env:VIRTUAL_ENV/" + 'bin' + ':' + $env:PATH +if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) { + function global:_old_virtual_prompt { + "" + } + $function:_old_virtual_prompt = $function:prompt + + function global:prompt { + # Add the custom prefix to the existing prompt + $previous_prompt_value = & $function:_old_virtual_prompt + ("(" + $env:VIRTUAL_ENV_PROMPT + ") " + $previous_prompt_value) + } +} diff --git a/bin/activate_this.py b/bin/activate_this.py new file mode 100644 index 00000000..eaa07caa --- /dev/null +++ b/bin/activate_this.py @@ -0,0 +1,38 @@ +""" +Activate virtualenv for current interpreter: + +import runpy +runpy.run_path(this_file) + +This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. 
+""" # noqa: D415 + +from __future__ import annotations + +import os +import site +import sys + +try: + abs_file = os.path.abspath(__file__) +except NameError as exc: + msg = "You must use import runpy; runpy.run_path(this_file)" + raise AssertionError(msg) from exc + +bin_dir = os.path.dirname(abs_file) +base = bin_dir[: -len('bin') - 1] # strip away the bin part from the __file__, plus the path separator + +# prepend bin to PATH (this file is inside the bin directory) +os.environ["PATH"] = os.pathsep.join([bin_dir, *os.environ.get("PATH", "").split(os.pathsep)]) +os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory +os.environ["VIRTUAL_ENV_PROMPT"] = '' or os.path.basename(base) + +# add the virtual environments libraries to the host python import mechanism +prev_length = len(sys.path) +for lib in '../lib/python3.11/site-packages'.split(os.pathsep): + path = os.path.realpath(os.path.join(bin_dir, lib)) + site.addsitedir(path.decode("utf-8") if '' else path) +sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length] + +sys.real_prefix = sys.prefix +sys.prefix = base diff --git a/bin/filetype b/bin/filetype new file mode 100755 index 00000000..ddc81f9b --- /dev/null +++ b/bin/filetype @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from filetype.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/httpx b/bin/httpx new file mode 100755 index 00000000..952e42c4 --- /dev/null +++ b/bin/httpx @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from httpx import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/normalizer b/bin/normalizer new file mode 100755 index 00000000..1e217983 --- /dev/null +++ b/bin/normalizer @@ -0,0 
+1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli.cli_detect()) diff --git a/bin/pip b/bin/pip new file mode 100755 index 00000000..9a4f809e --- /dev/null +++ b/bin/pip @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/pip-3.11 b/bin/pip-3.11 new file mode 100755 index 00000000..9a4f809e --- /dev/null +++ b/bin/pip-3.11 @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/pip3 b/bin/pip3 new file mode 100755 index 00000000..9a4f809e --- /dev/null +++ b/bin/pip3 @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/pip3.11 b/bin/pip3.11 new file mode 100755 index 00000000..9a4f809e --- /dev/null +++ b/bin/pip3.11 @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/python b/bin/python new file mode 120000 index 00000000..79ab74b1 --- /dev/null +++ b/bin/python @@ -0,0 +1 @@ +/usr/local/bin/python3 \ No newline at end of file diff --git 
a/bin/python3 b/bin/python3 new file mode 120000 index 00000000..d8654aa0 --- /dev/null +++ b/bin/python3 @@ -0,0 +1 @@ +python \ No newline at end of file diff --git a/bin/python3.11 b/bin/python3.11 new file mode 120000 index 00000000..d8654aa0 --- /dev/null +++ b/bin/python3.11 @@ -0,0 +1 @@ +python \ No newline at end of file diff --git a/bin/wheel b/bin/wheel new file mode 100755 index 00000000..5ba5d0ac --- /dev/null +++ b/bin/wheel @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from wheel.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/wheel-3.11 b/bin/wheel-3.11 new file mode 100755 index 00000000..5ba5d0ac --- /dev/null +++ b/bin/wheel-3.11 @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from wheel.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/wheel3 b/bin/wheel3 new file mode 100755 index 00000000..5ba5d0ac --- /dev/null +++ b/bin/wheel3 @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from wheel.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/bin/wheel3.11 b/bin/wheel3.11 new file mode 100755 index 00000000..5ba5d0ac --- /dev/null +++ b/bin/wheel3.11 @@ -0,0 +1,8 @@ +#!/Users/charlesholtzkampf/sentnl/twikit/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from wheel.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/build/lib/twikit/__init__.py b/build/lib/twikit/__init__.py new file mode 100644 index 00000000..082bc822 --- /dev/null +++ 
b/build/lib/twikit/__init__.py @@ -0,0 +1,31 @@ +""" +========================== +Twikit Twitter API Wrapper +========================== + +https://github.com/d60/twikit +A Python library for interacting with the Twitter API. +""" + +__version__ = '2.2.1' + +import asyncio +import os + +if os.name == 'nt': + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + +from ._captcha import Capsolver +from .bookmark import BookmarkFolder +from .errors import * +from .utils import build_query +from .client.client import Client +from .community import Community, CommunityCreator, CommunityMember, CommunityRule +from .geo import Place +from .group import Group, GroupMessage +from .list import List +from .message import Message +from .notification import Notification +from .trend import Trend +from .tweet import CommunityNote, Poll, ScheduledTweet, Tweet +from .user import User diff --git a/build/lib/twikit/_captcha/__init__.py b/build/lib/twikit/_captcha/__init__.py new file mode 100644 index 00000000..85f95d91 --- /dev/null +++ b/build/lib/twikit/_captcha/__init__.py @@ -0,0 +1,2 @@ +from .base import CaptchaSolver +from .capsolver import Capsolver diff --git a/build/lib/twikit/_captcha/base.py b/build/lib/twikit/_captcha/base.py new file mode 100644 index 00000000..cdf376c7 --- /dev/null +++ b/build/lib/twikit/_captcha/base.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import re +from typing import TYPE_CHECKING, NamedTuple + +from bs4 import BeautifulSoup +from httpx import Response +from ..constants import DOMAIN + +if TYPE_CHECKING: + from ..client.client import Client + + +class UnlockHTML(NamedTuple): + authenticity_token: str + assignment_token: str + needs_unlock: bool + start_button: bool + finish_button: bool + delete_button: bool + blob: str + + +class CaptchaSolver: + client: Client + max_attempts: int + + CAPTCHA_URL = f'https://{DOMAIN}/account/access' + CAPTCHA_SITE_KEY = '0152B4EB-D2DC-460A-89A1-629838B529C9' + + async def 
get_unlock_html(self) -> tuple[Response, UnlockHTML]: + headers = { + 'X-Twitter-Client-Language': 'en-US', + 'User-Agent': self.client._user_agent, + 'Upgrade-Insecure-Requests': '1' + } + _, response = await self.client.get( + self.CAPTCHA_URL, headers=headers + ) + return response, parse_unlock_html(response.text) + + async def ui_metrix(self) -> str: + js, _ = await self.client.get( + f'https://{DOMAIN}/i/js_inst?c_name=ui_metrics' + ) + return re.findall(r'return ({.*?});', js, re.DOTALL)[0] + + async def confirm_unlock( + self, + authenticity_token: str, + assignment_token: str, + verification_string: str = None, + ui_metrics: bool = False + ) -> tuple[Response, UnlockHTML]: + data = { + 'authenticity_token': authenticity_token, + 'assignment_token': assignment_token, + 'lang': 'en', + 'flow': '', + } + params = {} + if verification_string: + data['verification_string'] = verification_string + data['language_code'] = 'en' + params['lang'] = 'en' + if ui_metrics: + data['ui_metrics'] = await self.client._ui_metrix() + headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + 'Upgrade-Insecure-Requests': '1', + 'Referer': self.CAPTCHA_URL + } + _, response = await self.client.post( + self.CAPTCHA_URL, params=params, data=data, headers=headers + ) + return response, parse_unlock_html(response.text) + + +def parse_unlock_html(html: str) -> UnlockHTML: + soup = BeautifulSoup(html, 'lxml') + + authenticity_token = None + authenticity_token_element = soup.find( + 'input', {'name': 'authenticity_token'} + ) + if authenticity_token_element is not None: + authenticity_token: str = authenticity_token_element.get('value') + + assignment_token = None + assignment_token_element = soup.find('input', {'name': 'assignment_token'}) + if assignment_token_element is not None: + assignment_token = assignment_token_element.get('value') + + verification_string = soup.find('input', id='verification_string') + needs_unlock = bool(verification_string) + start_button = 
bool(soup.find('input', value='Start')) + finish_button = bool(soup.find('input', value='Continue to X')) + delete_button = bool(soup.find('input', value='Delete')) + + iframe = soup.find(id='arkose_iframe') + blob = re.findall(r'data=(.+)', iframe['src'])[0] if iframe else None + + return UnlockHTML( + authenticity_token, + assignment_token, + needs_unlock, + start_button, + finish_button, + delete_button, + blob + ) diff --git a/build/lib/twikit/_captcha/capsolver.py b/build/lib/twikit/_captcha/capsolver.py new file mode 100644 index 00000000..8224de87 --- /dev/null +++ b/build/lib/twikit/_captcha/capsolver.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from time import sleep + +import httpx + +from .base import CaptchaSolver + + +class Capsolver(CaptchaSolver): + """ + You can automatically unlock the account by passing the `captcha_solver` + argument when initialising the :class:`.Client`. + + First, visit https://capsolver.com and obtain your Capsolver API key. + Next, pass the Capsolver instance to the client as shown in the example. + + .. code-block:: python + + from twikit.twikit_async import Capsolver, Client + solver = Capsolver( + api_key='your_api_key', + max_attempts=10 + ) + client = Client(captcha_solver=solver) + + Parameters + ---------- + api_key : :class:`str` + Capsolver API key. + max_attempts : :class:`int`, default=3 + The maximum number of attempts to solve the captcha. 
+ get_result_interval : :class:`float`, default=1.0 + + use_blob_data : :class:`bool`, default=False + """ + + def __init__( + self, + api_key: str, + max_attempts: int = 3, + get_result_interval: float = 1.0, + use_blob_data: bool = False + ) -> None: + self.api_key = api_key + self.get_result_interval = get_result_interval + self.max_attempts = max_attempts + self.use_blob_data = use_blob_data + + def create_task(self, task_data: dict) -> dict: + data = { + 'clientKey': self.api_key, + 'task': task_data + } + response = httpx.post( + 'https://api.capsolver.com/createTask', + json=data, + headers={'content-type': 'application/json'} + ).json() + return response + + def get_task_result(self, task_id: str) -> dict: + data = { + 'clientKey': self.api_key, + 'taskId': task_id + } + response = httpx.post( + 'https://api.capsolver.com/getTaskResult', + json=data, + headers={'content-type': 'application/json'} + ).json() + return response + + def solve_funcaptcha(self, blob: str) -> dict: + if self.client.proxy is None: + captcha_type = 'FunCaptchaTaskProxyLess' + else: + captcha_type = 'FunCaptchaTask' + + task_data = { + 'type': captcha_type, + 'websiteURL': 'https://iframe.arkoselabs.com', + 'websitePublicKey': self.CAPTCHA_SITE_KEY, + 'funcaptchaApiJSSubdomain': 'https://client-api.arkoselabs.com', + 'proxy': self.client.proxy + } + if self.use_blob_data: + task_data['data'] = '{"blob":"%s"}' % blob + task_data['userAgent'] = self.client._user_agent + task = self.create_task(task_data) + while True: + sleep(self.get_result_interval) + result = self.get_task_result(task['taskId']) + if result['status'] in ('ready', 'failed'): + return result diff --git a/build/lib/twikit/bookmark.py b/build/lib/twikit/bookmark.py new file mode 100644 index 00000000..2ea8c893 --- /dev/null +++ b/build/lib/twikit/bookmark.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from httpx import Response + + from .client.client 
class BookmarkFolder:
    """
    Represents a bookmark folder on X/Twitter.

    Attributes
    ----------
    id : :class:`str`
        The ID of the folder.
    name : :class:`str`
        The name of the folder.
    media : :class:`str`
        Icon image data.
    """

    def __init__(self, client: Client, data: dict) -> None:
        self._client = client
        self.id: str = data['id']
        self.name: str = data['name']
        self.media: dict = data['media']

    async def get_tweets(self, cursor: str | None = None) -> Result[Tweet]:
        """Fetches the tweets bookmarked into this folder."""
        return await self._client.get_bookmarks(
            cursor=cursor, folder_id=self.id
        )

    async def edit(self, name: str) -> BookmarkFolder:
        """Renames this folder."""
        return await self._client.edit_bookmark_folder(self.id, name)

    async def delete(self) -> Response:
        """Deletes this folder."""
        return await self._client.delete_bookmark_folder(self.id)

    async def add(self, tweet_id: str) -> Response:
        """Bookmarks the given tweet into this folder."""
        return await self._client.bookmark_tweet(tweet_id, self.id)

    def __eq__(self, other: object) -> bool:
        # Folders are identified solely by their ID.
        return isinstance(other, BookmarkFolder) and self.id == other.id

    def __ne__(self, other: object) -> bool:
        return not self == other

    def __repr__(self) -> str:
        # NOTE(review): SOURCE literally returns an empty f-string here;
        # this looks like extraction stripped an angle-bracketed repr
        # (e.g. '<BookmarkFolder id=...>') — confirm against upstream.
        return f''
PlaceTrends, Trend +from ..tweet import CommunityNote, Poll, ScheduledTweet, Tweet, tweet_from_data +from ..user import User +from ..utils import ( + Flow, + Result, + build_tweet_data, + build_user_data, + find_dict, + find_entry_by_type, + httpx_transport_to_url +) +from ..x_client_transaction.utils import handle_x_migration +from ..x_client_transaction import ClientTransaction +from .gql import GQLClient +from .v11 import V11Client + + +class Client: + """ + A client for interacting with the Twitter API. + Since this class is for asynchronous use, + methods must be executed using await. + + Parameters + ---------- + language : :class:`str` | None, default=None + The language code to use in API requests. + proxy : :class:`str` | None, default=None + The proxy server URL to use for request + (e.g., 'http://0.0.0.0:0000'). + captcha_solver : :class:`.Capsolver` | None, default=None + See :class:`.Capsolver`. + + Examples + -------- + >>> client = Client(language='en-US') + + >>> await client.login( + ... auth_info_1='example_user', + ... auth_info_2='email@example.com', + ... password='00000000' + ... ) + """ + + def __init__( + self, + language: str = 'en-US', + proxy: str | None = None, + captcha_solver: Capsolver | None = None, + user_agent: str | None = None, + **kwargs + ) -> None: + if 'proxies' in kwargs: + message = ( + "The 'proxies' argument is now deprecated. Use 'proxy' " + "instead. 
https://github.com/encode/httpx/pull/2879" + ) + warnings.warn(message) + + self.http = AsyncClient(proxy=proxy, **kwargs) + self.language = language + self.proxy = proxy + self.captcha_solver = captcha_solver + if captcha_solver is not None: + captcha_solver.client = self + self.client_transaction = ClientTransaction() + + self._token = TOKEN + self._user_id = None + self._user_agent = user_agent or 'Mozilla/5.0 (Macintosh; Intel Mac OS X 14_6_1) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.5 Safari/605.1.15' + self._act_as = None + + self.gql = GQLClient(self) + self.v11 = V11Client(self) + + async def request( + self, + method: str, + url: str, + auto_unlock: bool = True, + raise_exception: bool = True, + **kwargs + ) -> tuple[dict | Any, Response]: + ':meta private:' + headers = kwargs.pop('headers', {}) + + if not self.client_transaction.home_page_response: + cookies_backup = self.get_cookies().copy() + ct_headers = { + 'Accept-Language': f'{self.language},{self.language.split("-")[0]};q=0.9', + 'Cache-Control': 'no-cache', + 'Referer': f'https://{DOMAIN}', + 'User-Agent': self._user_agent + } + await self.client_transaction.init(self.http, ct_headers) + self.set_cookies(cookies_backup, clear_cookies=True) + + tid = self.client_transaction.generate_transaction_id(method=method, path=urlparse(url).path) + headers['X-Client-Transaction-Id'] = tid + + cookies_backup = self.get_cookies().copy() + response = await self.http.request(method, url, headers=headers, **kwargs) + self._remove_duplicate_ct0_cookie() + + try: + response_data = response.json() + except json.decoder.JSONDecodeError: + response_data = response.text + + if isinstance(response_data, dict) and 'errors' in response_data: + error_code = response_data['errors'][0]['code'] + error_message = response_data['errors'][0].get('message') + if error_code in (37, 64): + # Account suspended + raise AccountSuspended(error_message) + + if error_code == 326: + # Account unlocking + if self.captcha_solver 
is None: + raise AccountLocked( + 'Your account is locked. Visit ' + f'https://{DOMAIN}/account/access to unlock it.' + ) + if auto_unlock: + await self.unlock() + self.set_cookies(cookies_backup, clear_cookies=True) + response = await self.http.request(method, url, **kwargs) + self._remove_duplicate_ct0_cookie() + try: + response_data = response.json() + except json.decoder.JSONDecodeError: + response_data = response.text + + status_code = response.status_code + + if status_code >= 400 and raise_exception: + message = f'status: {status_code}, message: "{response.text}"' + if status_code == 400: + raise BadRequest(message, headers=response.headers) + elif status_code == 401: + raise Unauthorized(message, headers=response.headers) + elif status_code == 403: + raise Forbidden(message, headers=response.headers) + elif status_code == 404: + raise NotFound(message, headers=response.headers) + elif status_code == 408: + raise RequestTimeout(message, headers=response.headers) + elif status_code == 429: + if await self._get_user_state() == 'suspended': + raise AccountSuspended(message, headers=response.headers) + raise TooManyRequests(message, headers=response.headers) + elif 500 <= status_code < 600: + raise ServerError(message, headers=response.headers) + else: + raise TwitterException(message, headers=response.headers) + + if status_code == 200: + return response_data, response + + return response_data, response + + async def get(self, url, **kwargs) -> tuple[dict | Any, Response]: + ':meta private:' + return await self.request('GET', url, **kwargs) + + async def post(self, url, **kwargs) -> tuple[dict | Any, Response]: + ':meta private:' + return await self.request('POST', url, **kwargs) + + def _remove_duplicate_ct0_cookie(self) -> None: + cookies = {} + for cookie in self.http.cookies.jar: + if 'ct0' in cookies and cookie.name == 'ct0': + continue + cookies[cookie.name] = cookie.value + self.http.cookies = list(cookies.items()) + + @property + def proxy(self) -> str: 
+ ':meta private:' + transport: AsyncHTTPTransport = self.http._mounts.get(URLPattern('all://')) + if transport is None: + return None + if not hasattr(transport._pool, '_proxy_url'): + return None + return httpx_transport_to_url(transport) + + @proxy.setter + def proxy(self, url: str) -> None: + self.http._mounts = {URLPattern('all://'): AsyncHTTPTransport(proxy=url)} + + def _get_csrf_token(self) -> str: + """ + Retrieves the Cross-Site Request Forgery (CSRF) token from the + current session's cookies. + + Returns + ------- + :class:`str` + The CSRF token as a string. + """ + return self.http.cookies.get('ct0') + + @property + def _base_headers(self) -> dict[str, str]: + """ + Base headers for Twitter API requests. + """ + headers = { + 'authorization': f'Bearer {self._token}', + 'content-type': 'application/json', + 'X-Twitter-Auth-Type': 'OAuth2Session', + 'X-Twitter-Active-User': 'yes', + 'Referer': f'https://{DOMAIN}/', + 'User-Agent': self._user_agent, + } + + if self.language is not None: + headers['Accept-Language'] = self.language + headers['X-Twitter-Client-Language'] = self.language + + csrf_token = self._get_csrf_token() + if csrf_token is not None: + headers['X-Csrf-Token'] = csrf_token + if self._act_as is not None: + headers['X-Act-As-User-Id'] = self._act_as + return headers + + async def _get_guest_token(self) -> str: + response, _ = await self.v11.guest_activate() + guest_token = response['guest_token'] + return guest_token + + async def _ui_metrix(self) -> str: + js, _ = await self.get(f'https://twitter.com/i/js_inst?c_name=ui_metrics') # keep twitter.com here + return re.findall(r'return ({.*?});', js, re.DOTALL)[0] + + async def login( + self, + *, + auth_info_1: str, + auth_info_2: str | None = None, + password: str, + totp_secret: str | None = None + ) -> dict: + """ + Logs into the account using the specified login information. + `auth_info_1` and `password` are required parameters. 
+ `auth_info_2` is optional and can be omitted, but it is + recommended to provide if available. + The order in which you specify authentication information + (auth_info_1 and auth_info_2) is flexible. + + Parameters + ---------- + auth_info_1 : :class:`str` + The first piece of authentication information, + which can be a username, email address, or phone number. + auth_info_2 : :class:`str`, default=None + The second piece of authentication information, + which is optional but recommended to provide. + It can be a username, email address, or phone number. + password : :class:`str` + The password associated with the account. + totp_secret : :class:`str` + The TOTP (Time-Based One-Time Password) secret key used for + two-factor authentication (2FA). + + Examples + -------- + >>> await client.login( + ... auth_info_1='example_user', + ... auth_info_2='email@example.com', + ... password='00000000' + ... ) + """ + self.http.cookies.clear() + guest_token = await self._get_guest_token() + + flow = Flow(self, guest_token) + + await flow.execute_task(params={'flow_name': 'login'}, data={ + 'input_flow_data': { + 'flow_context': { + 'debug_overrides': {}, + 'start_location': { + 'location': 'splash_screen' + } + } + }, + 'subtask_versions': { + 'action_list': 2, + 'alert_dialog': 1, + 'app_download_cta': 1, + 'check_logged_in_account': 1, + 'choice_selection': 3, + 'contacts_live_sync_permission_prompt': 0, + 'cta': 7, + 'email_verification': 2, + 'end_flow': 1, + 'enter_date': 1, + 'enter_email': 2, + 'enter_password': 5, + 'enter_phone': 2, + 'enter_recaptcha': 1, + 'enter_text': 5, + 'enter_username': 2, + 'generic_urt': 3, + 'in_app_notification': 1, + 'interest_picker': 3, + 'js_instrumentation': 1, + 'menu_dialog': 1, + 'notifications_permission_prompt': 2, + 'open_account': 2, + 'open_home_timeline': 1, + 'open_link': 1, + 'phone_verification': 4, + 'privacy_options': 1, + 'security_key': 3, + 'select_avatar': 4, + 'select_banner': 2, + 'settings_list': 7, + 
'show_code': 1, + 'sign_up': 2, + 'sign_up_review': 4, + 'tweet_selection_urt': 1, + 'update_users': 1, + 'upload_media': 1, + 'user_recommendations_list': 4, + 'user_recommendations_urt': 1, + 'wait_spinner': 3, + 'web_modal': 1 + } + }) + await flow.sso_init('apple') + await flow.execute_task({ + "subtask_id": "LoginJsInstrumentationSubtask", + "js_instrumentation": { + "response": await self._ui_metrix(), + "link": "next_link" + } + }) + await flow.execute_task({ + 'subtask_id': 'LoginEnterUserIdentifierSSO', + 'settings_list': { + 'setting_responses': [ + { + 'key': 'user_identifier', + 'response_data': { + 'text_data': {'result': auth_info_1} + } + } + ], + 'link': 'next_link' + } + }) + + if flow.task_id == 'LoginEnterAlternateIdentifierSubtask': + await flow.execute_task({ + 'subtask_id': 'LoginEnterAlternateIdentifierSubtask', + 'enter_text': { + 'text': auth_info_2, + 'link': 'next_link' + } + }) + + if flow.task_id == 'DenyLoginSubtask': + raise TwitterException(flow.response['subtasks'][0]['cta']['secondary_text']['text']) + + await flow.execute_task({ + 'subtask_id': 'LoginEnterPassword', + 'enter_password': { + 'password': password, + 'link': 'next_link' + } + }) + + if flow.task_id == 'DenyLoginSubtask': + raise TwitterException(flow.response['subtasks'][0]['cta']['secondary_text']['text']) + + if flow.task_id == 'LoginAcid': + print(find_dict(flow.response, 'secondary_text', find_one=True)[0]['text']) + + await flow.execute_task({ + 'subtask_id': 'LoginAcid', + 'enter_text': { + 'text': input('>>> '), + 'link': 'next_link' + } + }) + return flow.response + + await flow.execute_task({ + 'subtask_id': 'AccountDuplicationCheck', + 'check_logged_in_account': { + 'link': 'AccountDuplicationCheck_false' + } + }) + + if not flow.response['subtasks']: + return + + self._user_id = find_dict(flow.response, 'id_str', find_one=True)[0] + + if flow.task_id == 'LoginTwoFactorAuthChallenge': + if totp_secret is None: + print(find_dict(flow.response, 
'secondary_text', find_one=True)[0]['text']) + totp_code = input('>>>') + else: + totp_code = pyotp.TOTP(totp_secret).now() + + await flow.execute_task({ + 'subtask_id': 'LoginTwoFactorAuthChallenge', + 'enter_text': { + 'text': totp_code, + 'link': 'next_link' + } + }) + + return flow.response + + async def logout(self) -> Response: + """ + Logs out of the currently logged-in account. + """ + response, _ = await self.v11.account_logout() + return response + + async def unlock(self) -> None: + """ + Unlocks the account using the provided CAPTCHA solver. + + See Also + -------- + .capsolver + """ + if self.captcha_solver is None: + raise ValueError('Captcha solver is not provided.') + + response, html = await self.captcha_solver.get_unlock_html() + + if html.delete_button: + response, html = await self.captcha_solver.confirm_unlock( + html.authenticity_token, + html.assignment_token, + ui_metrics=True + ) + + if html.start_button or html.finish_button: + response, html = await self.captcha_solver.confirm_unlock( + html.authenticity_token, + html.assignment_token, + ui_metrics=True + ) + + cookies_backup = self.get_cookies().copy() + max_unlock_attempts = self.captcha_solver.max_attempts + attempt = 0 + while attempt < max_unlock_attempts: + attempt += 1 + + if html.authenticity_token is None: + response, html = await self.captcha_solver.get_unlock_html() + + result = self.captcha_solver.solve_funcaptcha(html.blob) + if result['errorId'] == 1: + continue + + self.set_cookies(cookies_backup, clear_cookies=True) + response, html = await self.captcha_solver.confirm_unlock( + html.authenticity_token, + html.assignment_token, + result['solution']['token'], + ) + + if html.finish_button: + response, html = await self.captcha_solver.confirm_unlock( + html.authenticity_token, + html.assignment_token, + ui_metrics=True + ) + finished = ( + response.next_request is not None and + response.next_request.url.path == '/' + ) + if finished: + return + raise Exception('could not 
unlock the account.') + + def get_cookies(self) -> dict: + """ + Get the cookies. + You can skip the login procedure by loading the saved cookies + using the :func:`set_cookies` method. + + Examples + -------- + >>> client.get_cookies() + + See Also + -------- + .set_cookies + .load_cookies + .save_cookies + """ + return dict(self.http.cookies) + + def save_cookies(self, path: str) -> None: + """ + Save cookies to file in json format. + You can skip the login procedure by loading the saved cookies + using the :func:`load_cookies` method. + + Parameters + ---------- + path : :class:`str` + The path to the file where the cookie will be stored. + + Examples + -------- + >>> client.save_cookies('cookies.json') + + See Also + -------- + .load_cookies + .get_cookies + .set_cookies + """ + with open(path, 'w', encoding='utf-8') as f: + json.dump(self.get_cookies(), f) + + def set_cookies(self, cookies: dict, clear_cookies: bool = False) -> None: + """ + Sets cookies. + You can skip the login procedure by loading a saved cookies. + + Parameters + ---------- + cookies : :class:`dict` + The cookies to be set as key value pair. + + Examples + -------- + >>> with open('cookies.json', 'r', encoding='utf-8') as f: + ... client.set_cookies(json.load(f)) + + See Also + -------- + .get_cookies + .load_cookies + .save_cookies + """ + if clear_cookies: + self.http.cookies.clear() + self.http.cookies.update(cookies) + + def load_cookies(self, path: str) -> None: + """ + Loads cookies from a file. + You can skip the login procedure by loading a saved cookies. + + Parameters + ---------- + path : :class:`str` + Path to the file where the cookie is stored. + + Examples + -------- + >>> client.load_cookies('cookies.json') + + See Also + -------- + .get_cookies + .save_cookies + .set_cookies + """ + with open(path, 'r', encoding='utf-8') as f: + self.set_cookies(json.load(f)) + + def set_delegate_account(self, user_id: str | None) -> None: + """ + Sets the account to act as. 
+ + Parameters + ---------- + user_id : :class:`str` | None + The user ID of the account to act as. + Set to None to clear the delegated account. + """ + self._act_as = user_id + + async def user_id(self) -> str: + """ + Retrieves the user ID associated with the authenticated account. + """ + if self._user_id is not None: + return self._user_id + response, _ = await self.v11.settings() + screen_name = response['screen_name'] + self._user_id = (await self.get_user_by_screen_name(screen_name)).id + return self._user_id + + async def user(self) -> User: + """ + Retrieve detailed information about the authenticated user. + """ + return await self.get_user_by_id(await self.user_id()) + + async def search_tweet( + self, + query: str, + product: Literal['Top', 'Latest', 'Media'], + count: int = 20, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Searches for tweets based on the specified query and + product type. + + Parameters + ---------- + query : :class:`str` + The search query. + product : {'Top', 'Latest', 'Media'} + The type of tweets to retrieve. + count : :class:`int`, default=20 + The number of tweets to retrieve, between 1 and 20. + cursor : :class:`str`, default=20 + Token to retrieve more tweets. + + Returns + ------- + Result[:class:`Tweet`] + An instance of the `Result` class containing the + search results. + + Examples + -------- + >>> tweets = await client.search_tweet('query', 'Top') + >>> for tweet in tweets: + ... print(tweet) + + + ... + ... + + >>> more_tweets = await tweets.next() # Retrieve more tweets + >>> for tweet in more_tweets: + ... print(tweet) + + + ... + ... 
+ + >>> # Retrieve previous tweets + >>> previous_tweets = await tweets.previous() + """ + product = product.capitalize() + + response, _ = await self.gql.search_timeline(query, product, count, cursor) + instructions = find_dict(response, 'instructions', find_one=True) + if not instructions: + return Result([]) + instructions = instructions[0] + + if product == 'Media' and cursor is not None: + items = find_dict(instructions, 'moduleItems', find_one=True)[0] + else: + items_ = find_dict(instructions, 'entries', find_one=True) + if items_: + items = items_[0] + else: + items = [] + if product == 'Media': + if 'items' in items[0]['content']: + items = items[0]['content']['items'] + else: + items = [] + + next_cursor = None + previous_cursor = None + + results = [] + for item in items: + if item['entryId'].startswith('cursor-bottom'): + next_cursor = item['content']['value'] + if item['entryId'].startswith('cursor-top'): + previous_cursor = item['content']['value'] + if not item['entryId'].startswith(('tweet', 'search-grid')): + continue + + tweet = tweet_from_data(self, item) + if tweet is not None: + results.append(tweet) + + if next_cursor is None: + if product == 'Media': + entries = find_dict(instructions, 'entries', find_one=True)[0] + next_cursor = entries[-1]['content']['value'] + previous_cursor = entries[-2]['content']['value'] + else: + next_cursor = instructions[-1]['entry']['content']['value'] + previous_cursor = instructions[-2]['entry']['content']['value'] + + return Result( + results, + partial(self.search_tweet, query, product, count, next_cursor), + next_cursor, + partial(self.search_tweet, query, product, count, previous_cursor), + previous_cursor + ) + + async def search_user( + self, + query: str, + count: int = 20, + cursor: str | None = None + ) -> Result[User]: + """ + Searches for users based on the provided query. + + Parameters + ---------- + query : :class:`str` + The search query for finding users. 
+ count : :class:`int`, default=20 + The number of users to retrieve in each request. + cursor : :class:`str`, default=None + Token to retrieve more users. + + Returns + ------- + Result[:class:`User`] + An instance of the `Result` class containing the + search results. + + Examples + -------- + >>> result = await client.search_user('query') + >>> for user in result: + ... print(user) + + + ... + ... + + >>> more_results = await result.next() # Retrieve more search results + >>> for user in more_results: + ... print(user) + + + ... + ... + """ + response, _ = await self.gql.search_timeline(query, 'People', count, cursor) + items = find_dict(response, 'entries', find_one=True)[0] + next_cursor = items[-1]['content']['value'] + + results = [] + for item in items: + if 'itemContent' not in item['content']: + continue + user_info = find_dict(item, 'result', find_one=True)[0] + results.append(User(self, user_info)) + + return Result( + results, + partial(self.search_user, query, count, next_cursor), + next_cursor + ) + + async def get_similar_tweets(self, tweet_id: str) -> list[Tweet]: + """ + Retrieves tweets similar to the specified tweet (Twitter premium only). + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet for which similar tweets are to be retrieved. + + Returns + ------- + list[:class:`Tweet`] + A list of Tweet objects representing tweets + similar to the specified tweet. + """ + response, _ = await self.gql.similar_posts(tweet_id) + items_ = find_dict(response, 'entries', find_one=True) + results = [] + if not items_: + return results + + for item in items_[0]: + if not item['entryId'].startswith('tweet'): + continue + + tweet = tweet_from_data(self, item) + if tweet is not None: + results.append(tweet) + + return results + + async def get_user_highlights_tweets( + self, + user_id: str, + count: int = 20, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves highlighted tweets from a user's timeline. 
+ + Parameters + ---------- + user_id : :class:`str` + The user ID + count : :class:`int`, default=20 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`Tweet`] + An instance of the `Result` class containing the highlighted tweets. + + Examples + -------- + >>> result = await client.get_user_highlights_tweets('123456789') + >>> for tweet in result: + ... print(tweet) + + + ... + ... + + >>> more_results = await result.next() # Retrieve more highlighted tweets + >>> for tweet in more_results: + ... print(tweet) + + + ... + ... + """ + response, _ = await self.gql.user_highlights_tweets(user_id, count, cursor) + + instructions = response['data']['user']['result']['timeline']['timeline']['instructions'] + instruction = find_entry_by_type(instructions, 'TimelineAddEntries') + if instruction is None: + return Result.empty() + entries = instruction['entries'] + previous_cursor = None + next_cursor = None + results = [] + + for entry in entries: + entryId = entry['entryId'] + if entryId.startswith('tweet'): + results.append(tweet_from_data(self, entry)) + elif entryId.startswith('cursor-top'): + previous_cursor = entry['content']['value'] + elif entryId.startswith('cursor-bottom'): + next_cursor = entry['content']['value'] + + return Result( + results, + partial(self.get_user_highlights_tweets, user_id, count, next_cursor), + next_cursor, + partial(self.get_user_highlights_tweets, user_id, count, previous_cursor), + previous_cursor + ) + + async def upload_media( + self, + source: str | bytes, + wait_for_completion: bool = False, + status_check_interval: float | None = None, + media_type: str | None = None, + media_category: str | None = None, + is_long_video: bool = False + ) -> str: + """ + Uploads media to twitter. + + Parameters + ---------- + source : :class:`str` | :class:`bytes` + The source of the media to be uploaded. + It can be either a file path or bytes of the media content. 
+ wait_for_completion : :class:`bool`, default=False + Whether to wait for the completion of the media upload process. + status_check_interval : :class:`float`, default=1.0 + The interval (in seconds) to check the status of the + media upload process. + media_type : :class:`str`, default=None + The MIME type of the media. + If not specified, it will be guessed from the source. + media_category : :class:`str`, default=None + The media category. + is_long_video : :class:`bool`, default=False + If this is True, videos longer than 2:20 can be uploaded. + (Twitter Premium only) + + Returns + ------- + :class:`str` + The media ID of the uploaded media. + + Examples + -------- + Videos, images and gifs can be uploaded. + + >>> media_id_1 = await client.upload_media( + ... 'media1.jpg', + ... ) + + >>> media_id_2 = await client.upload_media( + ... 'media2.mp4', + ... wait_for_completion=True + ... ) + + >>> media_id_3 = await client.upload_media( + ... 'media3.gif', + ... wait_for_completion=True, + ... media_category='tweet_gif' # media_category must be specified + ... ) + """ + if not isinstance(wait_for_completion, bool): + raise TypeError( + 'wait_for_completion must be bool,' + f' not {wait_for_completion.__class__.__name__}' + ) + + if isinstance(source, str): + # If the source is a path + with open(source, 'rb') as file: + binary = file.read() + elif isinstance(source, bytes): + # If the source is bytes + binary = source + + if media_type is None: + # Guess mimetype if not specified + media_type = filetype.guess(binary).mime + + if wait_for_completion: + if media_type == 'image/gif': + if media_category is None: + raise TwitterException( + "`media_category` must be specified to check the " + "upload status of gif images ('dm_gif' or 'tweet_gif')" + ) + elif media_type.startswith('image'): + # Checking the upload status of an image is impossible. 
+ wait_for_completion = False + + total_bytes = len(binary) + + # ============ INIT ============= + response, _ = await self.v11.upload_media_init( + media_type, total_bytes, media_category, is_long_video + ) + media_id = response['media_id'] + # =========== APPEND ============ + segment_index = 0 + bytes_sent = 0 + MAX_SEGMENT_SIZE = 8 * 1024 * 1024 # The maximum segment size is 8 MB + append_tasks = [] + chunk_streams: list[io.BytesIO] = [] + + while bytes_sent < total_bytes: + chunk = binary[bytes_sent:bytes_sent + MAX_SEGMENT_SIZE] + chunk_stream = io.BytesIO(chunk) + coro = self.v11.upload_media_append(is_long_video, media_id, segment_index, chunk_stream) + append_tasks.append(asyncio.create_task(coro)) + chunk_streams.append(chunk_stream) + + segment_index += 1 + bytes_sent += len(chunk) + + append_gather = asyncio.gather(*append_tasks) + await append_gather + + # Close chunk streams + for chunk_stream in chunk_streams: + chunk_stream.close() + + # ========== FINALIZE =========== + await self.v11.upload_media_finelize(is_long_video, media_id) + # =============================== + + if wait_for_completion: + while True: + state = await self.check_media_status(media_id, is_long_video) + processing_info = state['processing_info'] + if 'error' in processing_info: + raise InvalidMedia(processing_info['error'].get('message')) + if processing_info['state'] == 'succeeded': + break + await asyncio.sleep(status_check_interval or processing_info['check_after_secs']) + + return media_id + + async def check_media_status( + self, media_id: str, is_long_video: bool = False + ) -> dict: + """ + Check the status of uploaded media. + + Parameters + ---------- + media_id : :class:`str` + The media ID of the uploaded media. + + Returns + ------- + dict + A dictionary containing information about the status of + the uploaded media. 
+ """ + response, _ = await self.v11.upload_media_status(is_long_video, media_id) + return response + + async def create_media_metadata( + self, + media_id: str, + alt_text: str | None = None, + sensitive_warning: list[Literal['adult_content', 'graphic_violence', 'other']] = None + ) -> Response: + """ + Adds metadata to uploaded media. + + Parameters + ---------- + media_id : :class:`str` + The media id for which to create metadata. + alt_text : :class:`str` | None, default=None + Alternative text for the media. + sensitive_warning : list{'adult_content', 'graphic_violence', 'other'} + A list of sensitive content warnings for the media. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> media_id = await client.upload_media('media.jpg') + >>> await client.create_media_metadata( + ... media_id, + ... alt_text='This is a sample media', + ... sensitive_warning=['other'] + ... ) + >>> await client.create_tweet(media_ids=[media_id]) + """ + _, response = await self.v11.create_media_metadata(media_id, alt_text, sensitive_warning) + return response + + async def create_poll( + self, + choices: list[str], + duration_minutes: int + ) -> str: + """ + Creates a poll and returns card-uri. + + Parameters + ---------- + choices : list[:class:`str`] + A list of choices for the poll. Maximum of 4 choices. + duration_minutes : :class:`int` + The duration of the poll in minutes. + + Returns + ------- + :class:`str` + The URI of the created poll card. 
    async def create_tweet(
        self,
        text: str = '',
        media_ids: list[str] | None = None,
        poll_uri: str | None = None,
        reply_to: str | None = None,
        conversation_control: Literal['followers', 'verified', 'mentioned'] | None = None,
        attachment_url: str | None = None,
        community_id: str | None = None,
        share_with_followers: bool = False,
        is_note_tweet: bool = False,
        richtext_options: list[dict] = None,
        edit_tweet_id: str | None = None
    ) -> Tweet:
        """
        Creates a new tweet on Twitter with the specified
        text, media, and poll.

        Parameters
        ----------
        text : :class:`str`, default=''
            The text content of the tweet.
        media_ids : list[:class:`str`], default=None
            A list of media IDs or URIs to attach to the tweet.
            media IDs can be obtained by using the `upload_media` method.
        poll_uri : :class:`str`, default=None
            The URI of a Twitter poll card to attach to the tweet.
            Poll URIs can be obtained by using the `create_poll` method.
        reply_to : :class:`str`, default=None
            The ID of the tweet to which this tweet is a reply.
        conversation_control : {'followers', 'verified', 'mentioned'}
            The type of conversation control for the tweet:
            - 'followers': Limits replies to followers only.
            - 'verified': Limits replies to verified accounts only.
            - 'mentioned': Limits replies to mentioned accounts only.
        attachment_url : :class:`str`
            URL of the tweet to be quoted.
        community_id : :class:`str`, default=None
            ID of the community to post the tweet into.
        share_with_followers : :class:`bool`, default=False
            Whether a community tweet is also shared with followers.
        is_note_tweet : :class:`bool`, default=False
            If this option is set to True, tweets longer than 280 characters
            can be posted (Twitter Premium only).
        richtext_options : list[:class:`dict`], default=None
            Options for decorating text (Twitter Premium only).
        edit_tweet_id : :class:`str` | None, default=None
            ID of the tweet to edit (Twitter Premium only).

        Raises
        ------
        :exc:`DuplicateTweet` : If the tweet is a duplicate of another tweet.
        :exc:`CouldNotTweet` : If the API reports errors not mapped to a
            more specific exception.

        Returns
        -------
        :class:`Tweet`
            The Created Tweet.

        Examples
        --------
        Create a tweet with media:

        >>> tweet_text = 'Example text'
        >>> media_ids = [
        ...     await client.upload_media('image1.png'),
        ...     await client.upload_media('image2.png')
        ... ]
        >>> await client.create_tweet(
        ...     tweet_text,
        ...     media_ids=media_ids
        ... )

        Create a tweet with a poll:

        >>> tweet_text = 'Example text'
        >>> poll_choices = ['Option A', 'Option B', 'Option C']
        >>> duration_minutes = 60
        >>> poll_uri = await client.create_poll(poll_choices, duration_minutes)
        >>> await client.create_tweet(
        ...     tweet_text,
        ...     poll_uri=poll_uri
        ... )

        See Also
        --------
        .upload_media
        .create_poll
        """
        # Each attached media gets an (empty) tagged-users list as the
        # GQL payload requires.
        media_entities = [
            {'media_id': media_id, 'tagged_users': []}
            for media_id in (media_ids or [])
        ]
        # Map the friendly conversation_control names onto the API's
        # internal limit-mode identifiers.
        limit_mode = None
        if conversation_control is not None:
            conversation_control = conversation_control.lower()
            limit_mode = {
                'followers': 'Community',
                'verified': 'Verified',
                'mentioned': 'ByInvitation'
            }[conversation_control]

        response, _ = await self.gql.create_tweet(
            is_note_tweet, text, media_entities, poll_uri,
            reply_to, attachment_url, community_id, share_with_followers,
            richtext_options, edit_tweet_id, limit_mode
        )
        if 'errors' in response:
            # Raises a specific exception for known error codes;
            # otherwise fall through to the generic CouldNotTweet.
            raise_exceptions_from_response(response['errors'])
            raise CouldNotTweet(
                response['errors'][0] if response['errors'] else 'Failed to post a tweet.'
            )
        # Note tweets come back under a different response key.
        if is_note_tweet:
            _result = response['data']['notetweet_create']['tweet_results']
        else:
            _result = response['data']['create_tweet']['tweet_results']
        return tweet_from_data(self, _result)
)
        """
        response, _ = await self.gql.create_scheduled_tweet(scheduled_at, text, media_ids)
        return response['data']['tweet']['rest_id']

    async def delete_tweet(self, tweet_id: str) -> Response:
        """Deletes a tweet.

        Parameters
        ----------
        tweet_id : :class:`str`
            ID of the tweet to be deleted.

        Returns
        -------
        :class:`httpx.Response`
            Response returned from twitter api.

        Examples
        --------
        >>> tweet_id = '0000000000'
        >>> await client.delete_tweet(tweet_id)
        """
        _, response = await self.gql.delete_tweet(tweet_id)
        return response

    async def get_user_by_screen_name(self, screen_name: str) -> User:
        """
        Fetches a user by screen name.

        Parameters
        ----------
        screen_name : :class:`str`
            The screen name of the Twitter user.

        Returns
        -------
        :class:`User`
            An instance of the User class representing the
            Twitter user.

        Examples
        --------
        >>> target_screen_name = 'example_user'
        >>> user = await client.get_user_by_screen_name(target_screen_name)
        >>> print(user)

        """
        response, _ = await self.gql.user_by_screen_name(screen_name)

        # A missing 'user' key means the account does not exist at all;
        # an existing account that is suspended/unavailable instead comes
        # back with __typename == 'UserUnavailable'.
        if 'user' not in response['data']:
            raise UserNotFound('The user does not exist.')
        user_data = response['data']['user']['result']
        if user_data.get('__typename') == 'UserUnavailable':
            raise UserUnavailable(user_data.get('message'))

        return User(self, user_data)

    async def get_user_by_id(self, user_id: str) -> User:
        """
        Fetches a user by ID

        Parameters
        ----------
        user_id : :class:`str`
            The ID of the Twitter user.

        Returns
        -------
        :class:`User`
            An instance of the User class representing the
            Twitter user.

        Examples
        --------
        >>> target_user_id = '000000000'
        >>> user = await client.get_user_by_id(target_user_id)
        >>> print(user)

        """
        response, _ = await self.gql.user_by_rest_id(user_id)
        if 'result' not in response['data']['user']:
            raise TwitterException(f'Invalid user id: {user_id}')
        user_data = response['data']['user']['result']
        if user_data.get('__typename') == 'UserUnavailable':
            raise UserUnavailable(user_data.get('message'))
        return User(self, user_data)

    async def reverse_geocode(
        self, lat: float, long: float, accuracy: str | float | None = None,
        granularity: str | None = None, max_results: int | None = None
    ) -> list[Place]:
        """
        Given a latitude and a longitude, searches for up to 20 places
        that can be attached to a tweet (reverse geocoding).

        Parameters
        ----------
        lat : :class:`float`
            The latitude to search around.
        long : :class:`float`
            The longitude to search around.
        accuracy : :class:`str` | :class:`float` | None, default=None
            A hint on the "region" in which to search.
        granularity : :class:`str` | None, default=None
            This is the minimal granularity of place types to return and must
            be one of: `neighborhood`, `city`, `admin` or `country`.
        max_results : :class:`int` | None, default=None
            A hint as to the number of results to return.

        Returns
        -------
        list[:class:`.Place`]
        """
        response, _ = await self.v11.reverse_geocode(lat, long, accuracy, granularity, max_results)
        return _places_from_response(self, response)

    async def search_geo(
        self, lat: float | None = None, long: float | None = None,
        query: str | None = None, ip: str | None = None,
        granularity: str | None = None, max_results: int | None = None
    ) -> list[Place]:
        """
        Search for places that can be attached to a Tweet via POST
        statuses/update.

        Parameters
        ----------
        lat : :class:`float` | None
            The latitude to search around.
        long : :class:`float` | None
            The longitude to search around.
+ query : :class:`str` | None + Free-form text to match against while executing a geo-based query, + best suited for finding nearby locations by name. + Remember to URL encode the query. + ip : :class:`str` | None + An IP address. Used when attempting to + fix geolocation based off of the user's IP address. + granularity : :class:`str` | None + This is the minimal granularity of place types to return and must + be one of: `neighborhood`, `city`, `admin` or `country`. + max_results : :class:`int` | None + A hint as to the number of results to return. + + Returns + ------- + list[:class:`.Place`] + """ + response, _ = await self.v11.search_geo(lat, long, query, ip, granularity, max_results) + return _places_from_response(self, response) + + async def get_place(self, id: str) -> Place: + """ + Parameters + ---------- + id : :class:`str` + The ID of the place. + + Returns + ------- + :class:`.Place` + """ + response, _ = await self.v11.get_place(id) + return Place(self, response) + + async def _get_more_replies( + self, tweet_id: str, cursor: str + ) -> Result[Tweet]: + response, _ = await self.gql.tweet_detail(tweet_id, cursor) + entries = find_dict(response, 'entries', find_one=True)[0] + + results = [] + for entry in entries: + if entry['entryId'].startswith(('cursor', 'label')): + continue + tweet = tweet_from_data(self, entry) + if tweet is not None: + results.append(tweet) + + if entries[-1]['entryId'].startswith('cursor'): + next_cursor = entries[-1]['content']['itemContent']['value'] + _fetch_next_result = partial(self._get_more_replies, tweet_id, next_cursor) + else: + next_cursor = None + _fetch_next_result = None + + return Result( + results, + _fetch_next_result, + next_cursor + ) + + async def _show_more_replies( + self, tweet_id: str, cursor: str + ) -> Result[Tweet]: + response, _ = await self.gql.tweet_detail(tweet_id, cursor) + items = find_dict(response, 'moduleItems', find_one=True)[0] + results = [] + for item in items: + if 'tweet' not in 
item['entryId']: + continue + tweet = tweet_from_data(self, item) + if tweet is not None: + results.append(tweet) + return Result(results) + + async def get_tweet_by_id( + self, tweet_id: str, cursor: str | None = None + ) -> Tweet: + """ + Fetches a tweet by tweet ID. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet. + + Returns + ------- + :class:`Tweet` + A Tweet object representing the fetched tweet. + + Examples + -------- + >>> target_tweet_id = '...' + >>> tweet = client.get_tweet_by_id(target_tweet_id) + >>> print(tweet) + + """ + response, _ = await self.gql.tweet_detail(tweet_id, cursor) + + if 'errors' in response: + raise TweetNotAvailable(response['errors'][0]['message']) + + entries = find_dict(response, 'entries', find_one=True)[0] + reply_to = [] + replies_list = [] + related_tweets = [] + tweet = None + + for entry in entries: + if entry['entryId'].startswith('cursor'): + continue + tweet_object = tweet_from_data(self, entry) + if tweet_object is None: + continue + + if entry['entryId'].startswith('tweetdetailrelatedtweets'): + related_tweets.append(tweet_object) + continue + + if entry['entryId'] == f'tweet-{tweet_id}': + tweet = tweet_object + else: + if tweet is None: + reply_to.append(tweet_object) + else: + replies = [] + sr_cursor = None + show_replies = None + + for reply in entry['content']['items'][1:]: + if 'tweetcomposer' in reply['entryId']: + continue + if 'tweet' in reply.get('entryId'): + rpl = tweet_from_data(self, reply) + if rpl is None: + continue + replies.append(rpl) + if 'cursor' in reply.get('entryId'): + sr_cursor = reply['item']['itemContent']['value'] + show_replies = partial( + self._show_more_replies, + tweet_id, + sr_cursor + ) + tweet_object.replies = Result( + replies, + show_replies, + sr_cursor + ) + replies_list.append(tweet_object) + + display_type = find_dict(entry, 'tweetDisplayType', True) + if display_type and display_type[0] == 'SelfThread': + tweet.thread = [tweet_object, *replies] 
+ + if entries[-1]['entryId'].startswith('cursor'): + # if has more replies + reply_next_cursor = entries[-1]['content']['itemContent']['value'] + _fetch_more_replies = partial(self._get_more_replies, + tweet_id, reply_next_cursor) + else: + reply_next_cursor = None + _fetch_more_replies = None + + tweet.replies = Result( + replies_list, + _fetch_more_replies, + reply_next_cursor + ) + tweet.reply_to = reply_to + tweet.related_tweets = related_tweets + + return tweet + + async def get_tweets_by_ids(self, ids: list[str]) -> list[Tweet]: + """ + Retrieve multiple tweets by IDs. + + Parameters + ---------- + ids : list[:class:`str`] + A list of tweet IDs to retrieve. + + Returns + ------- + list[:class:`Tweet`] + List of tweets. + + Examples + -------- + >>> tweet_ids = ['1111111111', '1111111112', '111111113'] + >>> tweets = await client.get_tweets_by_ids(tweet_ids) + >>> print(tweets) + [, , ] + """ + response, _ = await self.gql.tweet_results_by_rest_ids(ids) + tweet_results = response['data']['tweetResult'] + results = [] + for tweet_result in tweet_results: + results.append(tweet_from_data(self, tweet_result)) + return results + + async def get_scheduled_tweets(self) -> list[ScheduledTweet]: + """ + Retrieves scheduled tweets. + + Returns + ------- + list[:class:`ScheduledTweet`] + List of ScheduledTweet objects representing the scheduled tweets. + """ + response, _ = await self.gql.fetch_scheduled_tweets() + tweets = find_dict(response, 'scheduled_tweet_list', find_one=True)[0] + return [ScheduledTweet(self, tweet) for tweet in tweets] + + async def delete_scheduled_tweet(self, tweet_id: str) -> Response: + """ + Delete a scheduled tweet. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the scheduled tweet to delete. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
        """
        _, response = await self.gql.delete_scheduled_tweet(tweet_id)
        return response

    async def _get_tweet_engagements(
        self, tweet_id: str, count: int, cursor: str | None, f
    ) -> Result[User]:
        """
        Base function to get tweet engagements (users who interacted
        with a tweet).

        Parameters
        ----------
        tweet_id : :class:`str`
            The ID of the tweet.
        count : :class:`int`
            The maximum number of users to retrieve per page.
        cursor : :class:`str` | None
            Pagination cursor; None for the first page.
        f
            The GQL endpoint coroutine to call, e.g. ``self.gql.retweeters``
            or ``self.gql.favoriters``.
        """
        response, _ = await f(tweet_id, count, cursor)
        items_ = find_dict(response, 'entries', True)
        if not items_:
            return Result([])
        items = items_[0]
        # By convention the last two timeline entries are the bottom/top
        # pagination cursors.
        next_cursor = items[-1]['content']['value']
        previous_cursor = items[-2]['content']['value']

        results = []
        for item in items:
            if not item['entryId'].startswith('user'):
                continue
            user_info_ = find_dict(item, 'result', True)
            if not user_info_:
                continue
            user_info = user_info_[0]
            results.append(User(self, user_info))

        return Result(
            results,
            partial(self._get_tweet_engagements, tweet_id, count, next_cursor, f),
            next_cursor,
            partial(self._get_tweet_engagements, tweet_id, count, previous_cursor, f),
            previous_cursor
        )

    async def get_retweeters(
        self, tweet_id: str, count: int = 40, cursor: str | None = None
    ) -> Result[User]:
        """
        Retrieve users who retweeted a specific tweet.

        Parameters
        ----------
        tweet_id : :class:`str`
            The ID of the tweet.
        count : :class:`int`, default=40
            The maximum number of users to retrieve.
        cursor : :class:`str`, default=None
            A string indicating the position of the cursor for pagination.

        Returns
        -------
        Result[:class:`User`]
            A list of users who retweeted the tweet.

        Examples
        --------
        >>> tweet_id = '...'
        >>> retweeters = await client.get_retweeters(tweet_id)
        >>> print(retweeters)
        [, , ..., ]

        >>> more_retweeters = await retweeters.next()  # Retrieve more retweeters.
+ >>> print(more_retweeters) + [, , ..., ] + """ + return await self._get_tweet_engagements(tweet_id, count, cursor, self.gql.retweeters) + + async def get_favoriters( + self, tweet_id: str, count: int = 40, cursor: str | None = None + ) -> Result[User]: + """ + Retrieve users who favorited a specific tweet. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet. + count : int, default=40 + The maximum number of users to retrieve. + cursor : :class:`str`, default=None + A string indicating the position of the cursor for pagination. + + Returns + ------- + Result[:class:`User`] + A list of users who favorited the tweet. + + Examples + -------- + >>> tweet_id = '...' + >>> favoriters = await client.get_favoriters(tweet_id) + >>> print(favoriters) + [, , ..., ] + + >>> # Retrieve more favoriters. + >>> more_favoriters = await favoriters.next() + >>> print(more_favoriters) + [, , ..., ] + """ + return await self._get_tweet_engagements(tweet_id, count, cursor, self.gql.favoriters) + + async def get_community_note(self, note_id: str) -> CommunityNote: + """ + Fetches a community note by ID. + + Parameters + ---------- + note_id : :class:`str` + The ID of the community note. + + Returns + ------- + :class:`CommunityNote` + A CommunityNote object representing the fetched community note. + + Raises + ------ + :exc:`TwitterException` + Invalid note ID. + + Examples + -------- + >>> note_id = '...' + >>> note = client.get_community_note(note_id) + >>> print(note) + + """ + response, _ = await self.gql.bird_watch_one_note(note_id) + note_data = response['data']['birdwatch_note_by_rest_id'] + if 'data_v1' not in note_data: + raise TwitterException(f'Invalid note id: {note_id}') + return CommunityNote(self, note_data) + + async def get_user_tweets( + self, + user_id: str, + tweet_type: Literal['Tweets', 'Replies', 'Media', 'Likes'], + count: int = 40, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Fetches tweets from a specific user's timeline. 
+ + Parameters + ---------- + user_id : :class:`str` + The ID of the Twitter user whose tweets to retrieve. + To get the user id from the screen name, you can use + `get_user_by_screen_name` method. + tweet_type : {'Tweets', 'Replies', 'Media', 'Likes'} + The type of tweets to retrieve. + count : :class:`int`, default=40 + The number of tweets to retrieve. + cursor : :class:`str`, default=None + The cursor for fetching the next set of results. + + Returns + ------- + Result[:class:`Tweet`] + A Result object containing a list of `Tweet` objects. + + Examples + -------- + >>> user_id = '...' + + If you only have the screen name, you can get the user id as follows: + + >>> screen_name = 'example_user' + >>> user = client.get_user_by_screen_name(screen_name) + >>> user_id = user.id + + >>> tweets = await client.get_user_tweets(user_id, 'Tweets', count=20) + >>> for tweet in tweets: + ... print(tweet) + + + ... + ... + + >>> more_tweets = await tweets.next() # Retrieve more tweets + >>> for tweet in more_tweets: + ... print(tweet) + + + ... + ... 
+ + >>> # Retrieve previous tweets + >>> previous_tweets = await tweets.previous() + + See Also + -------- + .get_user_by_screen_name + """ + tweet_type = tweet_type.capitalize() + f = { + 'Tweets': self.gql.user_tweets, + 'Replies': self.gql.user_tweets_and_replies, + 'Media': self.gql.user_media, + 'Likes': self.gql.user_likes, + }[tweet_type] + response, _ = await f(user_id, count, cursor) + + instructions_ = find_dict(response, 'instructions', True) + if not instructions_: + return Result([]) + instructions = instructions_[0] + + items = instructions[-1]['entries'] + next_cursor = items[-1]['content']['value'] + previous_cursor = items[-2]['content']['value'] + + if tweet_type == 'Media': + if cursor is None: + items = items[0]['content']['items'] + else: + items = instructions[0]['moduleItems'] + + results = [] + for item in items: + entry_id = item['entryId'] + + if not entry_id.startswith(('tweet', 'profile-conversation', 'profile-grid')): + continue + + if entry_id.startswith('profile-conversation'): + tweets = item['content']['items'] + replies = [] + for reply in tweets[1:]: + tweet_object = tweet_from_data(self, reply) + if tweet_object is None: + continue + replies.append(tweet_object) + item = tweets[0] + else: + replies = None + + tweet = tweet_from_data(self, item) + if tweet is None: + continue + tweet.replies = replies + results.append(tweet) + + return Result( + results, + partial(self.get_user_tweets, user_id, tweet_type, count, next_cursor), + next_cursor, + partial(self.get_user_tweets, user_id, tweet_type, count, previous_cursor), + previous_cursor + ) + + async def get_timeline( + self, + count: int = 20, + seen_tweet_ids: list[str] | None = None, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves the timeline. + Retrieves tweets from Home -> For You. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of tweets to retrieve. 
+ seen_tweet_ids : list[:class:`str`], default=None + A list of tweet IDs that have been seen. + cursor : :class:`str`, default=None + A cursor for pagination. + + Returns + ------- + Result[:class:`Tweet`] + A Result object containing a list of Tweet objects. + + Example + ------- + >>> tweets = await client.get_timeline() + >>> for tweet in tweets: + ... print(tweet) + + + ... + ... + >>> more_tweets = await tweets.next() # Retrieve more tweets + >>> for tweet in more_tweets: + ... print(tweet) + + + ... + ... + """ + response, _ = await self.gql.home_timeline(count, seen_tweet_ids, cursor) + items = find_dict(response, 'entries', find_one=True)[0] + next_cursor = items[-1]['content']['value'] + results = [] + + for item in items: + if 'itemContent' not in item['content']: + continue + tweet = tweet_from_data(self, item) + if tweet is None: + continue + results.append(tweet) + + return Result( + results, + partial(self.get_timeline, count, seen_tweet_ids, next_cursor), + next_cursor + ) + + async def get_latest_timeline( + self, + count: int = 20, + seen_tweet_ids: list[str] | None = None, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves the timeline. + Retrieves tweets from Home -> Following. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of tweets to retrieve. + seen_tweet_ids : list[:class:`str`], default=None + A list of tweet IDs that have been seen. + cursor : :class:`str`, default=None + A cursor for pagination. + + Returns + ------- + Result[:class:`Tweet`] + A Result object containing a list of Tweet objects. + + Example + ------- + >>> tweets = await client.get_latest_timeline() + >>> for tweet in tweets: + ... print(tweet) + + + ... + ... + >>> more_tweets = await tweets.next() # Retrieve more tweets + >>> for tweet in more_tweets: + ... print(tweet) + + + ... + ... 
+ """ + response, _ = await self.gql.home_latest_timeline(count, seen_tweet_ids, cursor) + items = find_dict(response, 'entries', find_one=True)[0] + next_cursor = items[-1]['content']['value'] + results = [] + + for item in items: + if 'itemContent' not in item['content']: + continue + tweet = tweet_from_data(self, item) + if tweet is None: + continue + results.append(tweet) + + return Result( + results, + partial(self.get_latest_timeline, count, seen_tweet_ids, next_cursor), + next_cursor + ) + + async def favorite_tweet(self, tweet_id: str) -> Response: + """ + Favorites a tweet. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet to be liked. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> tweet_id = '...' + >>> await client.favorite_tweet(tweet_id) + + See Also + -------- + .unfavorite_tweet + """ + _, response = await self.gql.favorite_tweet(tweet_id) + return response + + async def unfavorite_tweet(self, tweet_id: str) -> Response: + """ + Unfavorites a tweet. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet to be unliked. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> tweet_id = '...' + >>> await client.unfavorite_tweet(tweet_id) + + See Also + -------- + .favorite_tweet + """ + _, response = await self.gql.unfavorite_tweet(tweet_id) + return response + + async def retweet(self, tweet_id: str) -> Response: + """ + Retweets a tweet. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet to be retweeted. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> tweet_id = '...' 
+ >>> await client.retweet(tweet_id) + + See Also + -------- + .delete_retweet + """ + _, response = await self.gql.retweet(tweet_id) + return response + + async def delete_retweet(self, tweet_id: str) -> Response: + """ + Deletes the retweet. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the retweeted tweet to be unretweeted. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> tweet_id = '...' + >>> await client.delete_retweet(tweet_id) + + See Also + -------- + .retweet + """ + _, response = await self.gql.delete_retweet(tweet_id) + return response + + async def bookmark_tweet( + self, tweet_id: str, folder_id: str | None = None + ) -> Response: + """ + Adds the tweet to bookmarks. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet to be bookmarked. + folder_id : :class:`str` | None, default=None + The ID of the folder to add the bookmark to. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> tweet_id = '...' + >>> await client.bookmark_tweet(tweet_id) + """ + if folder_id is None: + _, response = await self.gql.create_bookmark(tweet_id) + else: + _, response = await self.gql.bookmark_tweet_to_folder(tweet_id, folder_id) + return response + + async def delete_bookmark(self, tweet_id: str) -> Response: + """ + Removes the tweet from bookmarks. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet to be removed from bookmarks. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> tweet_id = '...' 
        >>> await client.delete_bookmark(tweet_id)

        See Also
        --------
        .bookmark_tweet
        """
        _, response = await self.gql.delete_bookmark(tweet_id)
        return response

    async def get_bookmarks(
        self, count: int = 20,
        cursor: str | None = None, folder_id: str | None = None
    ) -> Result[Tweet]:
        """
        Retrieves bookmarks from the authenticated user's Twitter account.

        Parameters
        ----------
        count : :class:`int`, default=20
            The number of bookmarks to retrieve.
        cursor : :class:`str` | None, default=None
            A string indicating the position of the cursor for pagination.
        folder_id : :class:`str` | None, default=None
            Folder to retrieve bookmarks.

        Returns
        -------
        Result[:class:`Tweet`]
            A Result object containing a list of Tweet objects
            representing bookmarks.

        Example
        -------
        >>> bookmarks = await client.get_bookmarks()
        >>> for bookmark in bookmarks:
        ...     print(bookmark)



        >>> # To retrieve more bookmarks
        >>> more_bookmarks = await bookmarks.next()
        >>> for bookmark in more_bookmarks:
        ...     print(bookmark)


        """
        if folder_id is None:
            response, _ = await self.gql.bookmarks(count, cursor)
        else:
            response, _ = await self.gql.bookmark_folder_timeline(count, cursor, folder_id)

        items_ = find_dict(response, 'entries', find_one=True)
        if not items_:
            return Result([])
        items = items_[0]
        next_cursor = items[-1]['content']['value']
        # Folder timelines do not expose a top (previous) cursor, so
        # backwards pagination is only available for the main bookmark list.
        if folder_id is None:
            previous_cursor = items[-2]['content']['value']
            fetch_previous_result = partial(self.get_bookmarks, count, previous_cursor, folder_id)
        else:
            previous_cursor = None
            fetch_previous_result = None

        results = []
        for item in items:
            tweet = tweet_from_data(self, item)
            if tweet is None:
                continue
            results.append(tweet)

        return Result(
            results,
            partial(self.get_bookmarks, count, next_cursor, folder_id),
            next_cursor,
            fetch_previous_result,
            previous_cursor
        )

    async def delete_all_bookmarks(self) -> Response:
        """
        Deletes all bookmarks.

        Returns
        -------
        :class:`httpx.Response`
            Response returned from twitter api.
+ + Examples + -------- + >>> await client.delete_all_bookmarks() + """ + _, response = await self.gql.delete_all_bookmarks() + return response + + async def get_bookmark_folders(self, cursor: str | None = None) -> Result[BookmarkFolder]: + """ + Retrieves bookmark folders. + + Returns + ------- + Result[:class:`BookmarkFolder`] + Result object containing a list of bookmark folders. + + Examples + -------- + >>> folders = await client.get_bookmark_folders() + >>> print(folders) + [, ..., ] + >>> more_folders = await folders.next() # Retrieve more folders + """ + response, _ = await self.gql.bookmark_folders_slice(cursor) + + slice = find_dict(response, 'bookmark_collections_slice', find_one=True)[0] + results = [] + for item in slice['items']: + results.append(BookmarkFolder(self, item)) + + if 'next_cursor' in slice['slice_info']: + next_cursor = slice['slice_info']['next_cursor'] + fetch_next_result = partial(self.get_bookmark_folders, next_cursor) + else: + next_cursor = None + fetch_next_result = None + + return Result( + results, + fetch_next_result, + next_cursor + ) + + async def edit_bookmark_folder( + self, folder_id: str, name: str + ) -> BookmarkFolder: + """ + Edits a bookmark folder. + + Parameters + ---------- + folder_id : :class:`str` + ID of the folder to edit. + name : :class:`str` + New name for the folder. + + Returns + ------- + :class:`BookmarkFolder` + Updated bookmark folder. + + Examples + -------- + >>> await client.edit_bookmark_folder('123456789', 'MyFolder') + """ + response, _ = await self.gql.edit_bookmark_folder(folder_id, name) + return BookmarkFolder(self, response['data']['bookmark_collection_update']) + + async def delete_bookmark_folder(self, folder_id: str) -> Response: + """ + Deletes a bookmark folder. + + Parameters + ---------- + folder_id : :class:`str` + ID of the folder to delete. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
+ """ + _, response = await self.gql.delete_bookmark_folder(folder_id) + return response + + async def create_bookmark_folder(self, name: str) -> BookmarkFolder: + """Creates a bookmark folder. + + Parameters + ---------- + name : :class:`str` + Name of the folder. + + Returns + ------- + :class:`BookmarkFolder` + Newly created bookmark folder. + """ + response, _ = await self.gql.create_bookmark_folder(name) + return BookmarkFolder(self, response['data']['bookmark_collection_create']) + + async def follow_user(self, user_id: str) -> User: + """ + Follows a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to follow. + + Returns + ------- + :class:`User` + The followed user. + + Examples + -------- + >>> user_id = '...' + >>> await client.follow_user(user_id) + + See Also + -------- + .unfollow_user + """ + response, _ = await self.v11.create_friendships(user_id) + return User(self, build_user_data(response)) + + async def unfollow_user(self, user_id: str) -> User: + """ + Unfollows a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to unfollow. + + Returns + ------- + :class:`User` + The unfollowed user. + + Examples + -------- + >>> user_id = '...' + >>> await client.unfollow_user(user_id) + + See Also + -------- + .follow_user + """ + response, _ = await self.v11.destroy_friendships(user_id) + return User(self, build_user_data(response)) + + async def block_user(self, user_id: str) -> User: + """ + Blocks a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to block. + + Returns + ------- + :class:`User` + The blocked user. + + See Also + -------- + .unblock_user + """ + response, _ = await self.v11.create_blocks(user_id) + return User(self, build_user_data(response)) + + async def unblock_user(self, user_id: str) -> User: + """ + Unblocks a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to unblock. 
+ + Returns + ------- + :class:`User` + The unblocked user. + + See Also + -------- + .block_user + """ + response, _ = await self.v11.destroy_blocks(user_id) + return User(self, build_user_data(response)) + + async def mute_user(self, user_id: str) -> User: + """ + Mutes a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to mute. + + Returns + ------- + :class:`User` + The muted user. + + See Also + -------- + .unmute_user + """ + response, _ = await self.v11.create_mutes(user_id) + return User(self, build_user_data(response)) + + async def unmute_user(self, user_id: str) -> User: + """ + Unmutes a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to unmute. + + Returns + ------- + :class:`User` + The unmuted user. + + See Also + -------- + .mute_user + """ + response, _ = await self.v11.destroy_mutes(user_id) + return User(self, build_user_data(response)) + + async def get_trends( + self, + category: Literal['trending', 'for-you', 'news', 'sports', 'entertainment'], + count: int = 20, + retry: bool = True, + additional_request_params: dict | None = None + ) -> list[Trend]: + """ + Retrieves trending topics on Twitter. + + Parameters + ---------- + category : {'trending', 'for-you', 'news', 'sports', 'entertainment'} + The category of trends to retrieve. Valid options include: + - 'trending': General trending topics. + - 'for-you': Trends personalized for the user. + - 'news': News-related trends. + - 'sports': Sports-related trends. + - 'entertainment': Entertainment-related trends. + count : :class:`int`, default=20 + The number of trends to retrieve. + retry : :class:`bool`, default=True + If no trends are fetched continuously retry to fetch trends. + additional_request_params : :class:`dict`, default=None + Parameters to be added on top of the existing trends API + parameters. 
Typically, it is used as `additional_request_params = + {'candidate_source': 'trends'}` when this function doesn't work + otherwise. + + Returns + ------- + list[:class:`Trend`] + A list of Trend objects representing the retrieved trends. + + Examples + -------- + >>> trends = await client.get_trends('trending') + >>> for trend in trends: + ... print(trend) + + + ... + """ + category = category.lower() + if category in ['news', 'sports', 'entertainment']: + category += '_unified' + response, _ = await self.v11.guide(category, count, additional_request_params) + + entry_id_prefix = 'trends' if category == 'trending' else 'Guide' + entries = [ + i for i in find_dict(response, 'entries', find_one=True)[0] + if i['entryId'].startswith(entry_id_prefix) + ] + + if not entries: + if not retry: + return [] + # Recall the method again, as the trend information + # may not be returned due to a Twitter error. + return await self.get_trends(category, count, retry, additional_request_params) + + items = entries[-1]['content']['timelineModule']['items'] + + results = [] + for item in items: + trend_info = item['item']['content']['trend'] + results.append(Trend(self, trend_info)) + + return results + + async def get_available_locations(self) -> list[Location]: + """ + Retrieves locations where trends can be retrieved. + + Returns + ------- + list[:class:`.Location`] + """ + response, _ = await self.v11.available_trends() + return [Location(self, data) for data in response] + + async def get_place_trends(self, woeid: int) -> PlaceTrends: + """ + Retrieves the top 50 trending topics for a specific id. + You can get available woeid using + :attr:`.Client.get_available_locations`. 
+ """ + response, _ = await self.v11.place_trends(woeid) + trend_data = response[0] + trends = [PlaceTrend(self, data) for data in trend_data['trends']] + trend_data['trends'] = trends + return trend_data + + async def _get_user_friendship( + self, + user_id: str, + count: int, + f, + cursor: str | None + ) -> Result[User]: + """ + Base function to get friendship. + """ + response, _ = await f(user_id, count, cursor) + + items_ = find_dict(response, 'entries', find_one=True) + if not items_: + return Result.empty() + items = items_[0] + results = [] + for item in items: + entry_id = item['entryId'] + if entry_id.startswith('user'): + user_info = find_dict(item, 'result', find_one=True) + if not user_info: + warnings.warn( + 'Some followers are excluded because ' + '"Quality Filter" is enabled. To get all followers, ' + 'turn off it in the Twitter settings.' + ) + continue + if user_info[0].get('__typename') == 'UserUnavailable': + continue + results.append(User(self, user_info[0])) + elif entry_id.startswith('cursor-bottom'): + next_cursor = item['content']['value'] + + return Result( + results, + partial(self._get_user_friendship, user_id, count, f, next_cursor), + next_cursor + ) + + async def _get_user_friendship_2( + self, user_id: str, screen_name: str, + count: int, f, cursor: str + ) -> Result[User]: + response, _ = await f(user_id, screen_name, count, cursor) + users = response['users'] + results = [] + for user in users: + results.append(User(self, build_user_data(user))) + + previous_cursor = response['previous_cursor'] + next_cursor = response['next_cursor'] + + return Result( + results, + partial(self._get_user_friendship_2, user_id, screen_name, count, f, next_cursor), + next_cursor, + partial(self._get_user_friendship_2, user_id, screen_name, count, f, previous_cursor), + previous_cursor + ) + + async def get_user_followers( + self, user_id: str, count: int = 20, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves a list of followers for 
a given user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user for whom to retrieve followers. + count : int, default=20 + The number of followers to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the followers. + """ + return await self._get_user_friendship( + user_id, count, self.gql.followers, cursor + ) + + async def get_latest_followers( + self, user_id: str | None = None, screen_name: str | None = None, + count: int = 200, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves the latest followers. + Max count : 200 + """ + return await self._get_user_friendship_2( + user_id, screen_name, count, self.v11.followers_list, cursor + ) + + async def get_latest_friends( + self, user_id: str | None = None, screen_name: str | None = None, + count: int = 200, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves the latest friends (following users). + Max count : 200 + """ + return await self._get_user_friendship_2( + user_id, screen_name, count, self.v11.friends_list, cursor + ) + + async def get_user_verified_followers( + self, user_id: str, count: int = 20, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves a list of verified followers for a given user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user for whom to retrieve verified followers. + count : :class:`int`, default=20 + The number of verified followers to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the verified followers. + """ + return await self._get_user_friendship( + user_id, count, self.gql.blue_verified_followers, cursor + ) + + async def get_user_followers_you_know( + self, user_id: str, count: int = 20, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves a list of common followers. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user for whom to retrieve followers you might know. 
+ count : :class:`int`, default=20 + The number of followers you might know to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the followers you might know. + """ + return await self._get_user_friendship( + user_id, count, self.gql.followers_you_know, cursor + ) + + async def get_user_following( + self, user_id: str, count: int = 20, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves a list of users whom the given user is following. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user for whom to retrieve the following users. + count : :class:`int`, default=20 + The number of following users to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the users being followed. + """ + return await self._get_user_friendship( + user_id, count, self.gql.following, cursor + ) + + async def get_user_subscriptions( + self, user_id: str, count: int = 20, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves a list of users to which the specified user is subscribed. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user for whom to retrieve subscriptions. + count : :class:`int`, default=20 + The number of subscriptions to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the subscribed users. 
+ """ + return await self._get_user_friendship( + user_id, count, self.gql.user_creator_subscriptions, cursor + ) + + async def _get_friendship_ids( + self, + user_id: str | None, + screen_name: str | None, + count: int, + f, + cursor: str | None + ) -> Result[int]: + response, _ = await f(user_id, screen_name, count, cursor) + previous_cursor = response['previous_cursor'] + next_cursor = response['next_cursor'] + + return Result( + response['ids'], + partial(self._get_friendship_ids, user_id, screen_name, count, f, next_cursor), + next_cursor, + partial(self._get_friendship_ids, user_id, screen_name, count, f, previous_cursor), + previous_cursor + ) + + async def get_followers_ids( + self, + user_id: str | None = None, + screen_name: str | None = None, + count: int = 5000, + cursor: str | None = None + ) -> Result[int]: + """ + Fetches the IDs of the followers of a specified user. + + Parameters + ---------- + user_id : :class:`str` | None, default=None + The ID of the user for whom to return results. + screen_name : :class:`str` | None, default=None + The screen name of the user for whom to return results. + count : :class:`int`, default=5000 + The maximum number of IDs to retrieve. + + Returns + ------- + :class:`Result`[:class:`int`] + A Result object containing the IDs of the followers. + """ + return await self._get_friendship_ids(user_id, screen_name, count, self.v11.followers_ids, cursor) + + async def get_friends_ids( + self, + user_id: str | None = None, + screen_name: str | None = None, + count: int = 5000, + cursor: str | None = None + ) -> Result[int]: + """ + Fetches the IDs of the friends (following users) of a specified user. + + Parameters + ---------- + user_id : :class:`str` | None, default=None + The ID of the user for whom to return results. + screen_name : :class:`str` | None, default=None + The screen name of the user for whom to return results. + count : :class:`int`, default=5000 + The maximum number of IDs to retrieve. 
+ + Returns + ------- + :class:`Result`[:class:`int`] + A Result object containing the IDs of the friends. + """ + return await self._get_friendship_ids( + user_id, screen_name, count, self.v11.friends_ids, cursor + ) + + async def _send_dm( + self, + conversation_id: str, + text: str, + media_id: str | None, + reply_to: str | None + ) -> dict: + """ + Base function to send dm. + """ + response, _ = await self.v11.dm_new(conversation_id, text, media_id, reply_to) + return response + + async def _get_dm_history( + self, + conversation_id: str, + max_id: str | None = None + ) -> dict: + """ + Base function to get dm history. + """ + response, _ = await self.v11.dm_conversation(conversation_id, max_id) + return response + + async def send_dm( + self, + user_id: str, + text: str, + media_id: str | None = None, + reply_to: str | None = None + ) -> Message: + """ + Send a direct message to a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to whom the direct message will be sent. + text : :class:`str` + The text content of the direct message. + media_id : :class:`str`, default=None + The media ID associated with any media content + to be included in the message. + Media ID can be received by using the :func:`.upload_media` method. + reply_to : :class:`str`, default=None + Message ID to reply to. + + Returns + ------- + :class:`Message` + `Message` object containing information about the message sent. 
+ + Examples + -------- + >>> # send DM with media + >>> user_id = '000000000' + >>> media_id = await client.upload_media('image.png') + >>> message = await client.send_dm(user_id, 'text', media_id) + >>> print(message) + + + See Also + -------- + .upload_media + .delete_dm + """ + response = await self._send_dm( + f'{user_id}-{await self.user_id()}', text, media_id, reply_to + ) + + message_data = find_dict(response, 'message_data', find_one=True)[0] + users = list(response['users'].values()) + return Message( + self, + message_data, + users[0]['id_str'], + users[1]['id_str'] if len(users) == 2 else users[0]['id_str'] + ) + + async def add_reaction_to_message( + self, message_id: str, conversation_id: str, emoji: str + ) -> Response: + """ + Adds a reaction emoji to a specific message in a conversation. + + Parameters + ---------- + message_id : :class:`str` + The ID of the message to which the reaction emoji will be added. + Group ID ('00000000') or partner_ID-your_ID ('00000000-00000001') + conversation_id : :class:`str` + The ID of the conversation containing the message. + emoji : :class:`str` + The emoji to be added as a reaction. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> message_id = '00000000' + >>> conversation_id = f'00000001-{await client.user_id()}' + >>> await client.add_reaction_to_message( + ... message_id, conversation_id, 'Emoji here' + ... ) + """ + _, response = await self.gql.user_dm_reaction_mutation_add_mutation( + message_id, conversation_id, emoji + ) + return response + + async def remove_reaction_from_message( + self, message_id: str, conversation_id: str, emoji: str + ) -> Response: + """ + Remove a reaction from a message. + + Parameters + ---------- + message_id : :class:`str` + The ID of the message from which to remove the reaction. + conversation_id : :class:`str` + The ID of the conversation where the message is located. 
+ Group ID ('00000000') or partner_ID-your_ID ('00000000-00000001') + emoji : :class:`str` + The emoji to remove as a reaction. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> message_id = '00000000' + >>> conversation_id = f'00000001-{await client.user_id()}' + >>> await client.remove_reaction_from_message( + ... message_id, conversation_id, 'Emoji here' + ... ) + """ + _, response = await self.gql.user_dm_reaction_mutation_remove_mutation( + message_id, conversation_id, emoji + ) + return response + + async def delete_dm(self, message_id: str) -> Response: + """ + Deletes a direct message with the specified message ID. + + Parameters + ---------- + message_id : :class:`str` + The ID of the direct message to be deleted. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> await client.delete_dm('0000000000') + """ + _, response = await self.gql.dm_message_delete_mutation(message_id) + return response + + async def get_dm_history( + self, + user_id: str, + max_id: str | None = None + ) -> Result[Message]: + """ + Retrieves the DM conversation history with a specific user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user with whom the DM conversation + history will be retrieved. + max_id : :class:`str`, default=None + If specified, retrieves messages older than the specified max_id. + + Returns + ------- + Result[:class:`Message`] + A Result object containing a list of Message objects representing + the DM conversation history. + + Examples + -------- + >>> messages = await client.get_dm_history('0000000000') + >>> for message in messages: + >>> print(message) + + + ... + ... + + >>> more_messages = await messages.next() # Retrieve more messages + >>> for message in more_messages: + >>> print(message) + + + ... + ... 
+ """ + response = await self._get_dm_history( + f'{user_id}-{await self.user_id()}', max_id + ) + + if 'entries' not in response['conversation_timeline']: + return Result([]) + items = response['conversation_timeline']['entries'] + + messages = [] + for item in items: + message_info = item['message']['message_data'] + messages.append(Message( + self, + message_info, + message_info['sender_id'], + message_info['recipient_id'] + )) + + return Result( + messages, + partial(self.get_dm_history, user_id, messages[-1].id), + messages[-1].id + ) + + async def send_dm_to_group( + self, + group_id: str, + text: str, + media_id: str | None = None, + reply_to: str | None = None + ) -> GroupMessage: + """ + Sends a message to a group. + + Parameters + ---------- + group_id : :class:`str` + The ID of the group in which the direct message will be sent. + text : :class:`str` + The text content of the direct message. + media_id : :class:`str`, default=None + The media ID associated with any media content + to be included in the message. + Media ID can be received by using the :func:`.upload_media` method. + reply_to : :class:`str`, default=None + Message ID to reply to. + + Returns + ------- + :class:`GroupMessage` + `GroupMessage` object containing information about + the message sent. 
+ + Examples + -------- + >>> # send DM with media + >>> group_id = '000000000' + >>> media_id = await client.upload_media('image.png') + >>> message = await client.send_dm_to_group(group_id, 'text', media_id) + >>> print(message) + + + See Also + -------- + .upload_media + .delete_dm + """ + response = await self._send_dm(group_id, text, media_id, reply_to) + + message_data = find_dict(response, 'message_data', find_one=True)[0] + users = list(response['users'].values()) + return GroupMessage( + self, + message_data, + users[0]['id_str'], + group_id + ) + + async def get_group_dm_history( + self, + group_id: str, + max_id: str | None = None + ) -> Result[GroupMessage]: + """ + Retrieves the DM conversation history in a group. + + Parameters + ---------- + group_id : :class:`str` + The ID of the group in which the DM conversation + history will be retrieved. + max_id : :class:`str`, default=None + If specified, retrieves messages older than the specified max_id. + + Returns + ------- + Result[:class:`GroupMessage`] + A Result object containing a list of GroupMessage objects + representing the DM conversation history. + + Examples + -------- + >>> messages = await client.get_group_dm_history('0000000000') + >>> for message in messages: + >>> print(message) + + + ... + ... + + >>> more_messages = await messages.next() # Retrieve more messages + >>> for message in more_messages: + >>> print(message) + + + ... + ... 
+ """ + response = await self._get_dm_history(group_id, max_id) + if 'entries' not in response['conversation_timeline']: + return Result([]) + + items = response['conversation_timeline']['entries'] + messages = [] + for item in items: + if 'message' not in item: + continue + message_info = item['message']['message_data'] + messages.append(GroupMessage( + self, + message_info, + message_info['sender_id'], + group_id + )) + + return Result( + messages, + partial(self.get_group_dm_history, group_id, messages[-1].id), + messages[-1].id + ) + + async def get_group(self, group_id: str) -> Group: + """ + Fetches a guild by ID. + + Parameters + ---------- + group_id : :class:`str` + The ID of the group to retrieve information for. + + Returns + ------- + :class:`Group` + An object representing the retrieved group. + """ + response = await self._get_dm_history(group_id) + return Group(self, group_id, response) + + async def add_members_to_group( + self, group_id: str, user_ids: list[str] + ) -> Response: + """Adds members to a group. + + Parameters + ---------- + group_id : :class:`str` + ID of the group to which the member is to be added. + user_ids : list[:class:`str`] + List of IDs of users to be added. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> group_id = '...' + >>> members = ['...'] + >>> await client.add_members_to_group(group_id, members) + """ + _, response = await self.gql.add_participants_mutation(group_id, user_ids) + return response + + async def change_group_name(self, group_id: str, name: str) -> Response: + """Changes group name + + Parameters + ---------- + group_id : :class:`str` + ID of the group to be renamed. + name : :class:`str` + New name. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
+ """ + _, response = await self.v11.conversation_update_name(group_id, name) + return response + + async def create_list( + self, name: str, description: str = '', is_private: bool = False + ) -> List: + """ + Creates a list. + + Parameters + ---------- + name : :class:`str` + The name of the list. + description : :class:`str`, default='' + The description of the list. + is_private : :class:`bool`, default=False + Indicates whether the list is private (True) or public (False). + + Returns + ------- + :class:`List` + The created list. + + Examples + -------- + >>> list = await client.create_list( + ... 'list name', + ... 'list description', + ... is_private=True + ... ) + >>> print(list) + + """ + response, _ = await self.gql.create_list(name, description, is_private) + list_info = find_dict(response, 'list', find_one=True)[0] + return List(self, list_info) + + async def edit_list_banner(self, list_id: str, media_id: str) -> Response: + """ + Edit the banner image of a list. + + Parameters + ---------- + list_id : :class:`str` + The ID of the list. + media_id : :class:`str` + The ID of the media to use as the new banner image. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> list_id = '...' + >>> media_id = await client.upload_media('image.png') + >>> await client.edit_list_banner(list_id, media_id) + """ + _, response = await self.gql.edit_list_banner(list_id, media_id) + return response + + async def delete_list_banner(self, list_id: str) -> Response: + """Deletes list banner. + + Parameters + ---------- + list_id : :class:`str` + ID of the list from which the banner is to be removed. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
+ """ + _, response = await self.gql.delete_list_banner(list_id) + return response + + async def edit_list( + self, + list_id: str, + name: str | None = None, + description: str | None = None, + is_private: bool | None = None + ) -> List: + """ + Edits list information. + + Parameters + ---------- + list_id : :class:`str` + The ID of the list to edit. + name : :class:`str`, default=None + The new name for the list. + description : :class:`str`, default=None + The new description for the list. + is_private : :class:`bool`, default=None + Indicates whether the list should be private + (True) or public (False). + + Returns + ------- + :class:`List` + The updated Twitter list. + + Examples + -------- + >>> await client.edit_list( + ... 'new name', 'new description', True + ... ) + """ + response, _ = await self.gql.update_list(list_id, name, description, is_private) + list_info = find_dict(response, 'list', find_one=True)[0] + return List(self, list_info) + + async def add_list_member(self, list_id: str, user_id: str) -> List: + """ + Adds a user to a list. + + Parameters + ---------- + list_id : :class:`str` + The ID of the list. + user_id : :class:`str` + The ID of the user to add to the list. + + Returns + ------- + :class:`List` + The updated Twitter list. + + Examples + -------- + >>> await client.add_list_member('list id', 'user id') + """ + response, _ = await self.gql.list_add_member(list_id, user_id) + return List(self, response['data']['list']) + + async def remove_list_member(self, list_id: str, user_id: str) -> List: + """ + Removes a user from a list. + + Parameters + ---------- + list_id : :class:`str` + The ID of the list. + user_id : :class:`str` + The ID of the user to remove from the list. + + Returns + ------- + :class:`List` + The updated Twitter list. 
+ + Examples + -------- + >>> await client.remove_list_member('list id', 'user id') + """ + response, _ = await self.gql.list_remove_member(list_id, user_id) + if 'errors' in response: + raise TwitterException(response['errors'][0]['message']) + return List(self, response['data']['list']) + + async def get_lists( + self, count: int = 100, cursor: str = None + ) -> Result[List]: + """ + Retrieves a list of user lists. + + Parameters + ---------- + count : :class:`int` + The number of lists to retrieve. + + Returns + ------- + Result[:class:`List`] + Retrieved lists. + + Examples + -------- + >>> lists = client.get_lists() + >>> for list_ in lists: + ... print(list_) + + + ... + ... + >>> more_lists = lists.next() # Retrieve more lists + """ + response, _ = await self.gql.list_management_pace_timeline(count, cursor) + + entries = find_dict(response, 'entries', find_one=True)[0] + items = find_dict(entries, 'items') + + if len(items) < 2: + return Result([]) + + lists = [] + for list in items[1]: + lists.append(List(self, list['item']['itemContent']['list'])) + + next_cursor = entries[-1]['content']['value'] + + return Result( + lists, + partial(self.get_lists, count, next_cursor), + next_cursor + ) + + async def get_list(self, list_id: str) -> List: + """ + Retrieve list by ID. + + Parameters + ---------- + list_id : :class:`str` + The ID of the list to retrieve. + + Returns + ------- + :class:`List` + List object. + """ + response, _ = await self.gql.list_by_rest_id(list_id) + list_data_ = find_dict(response, 'list', find_one=True) + if not list_data_: + raise ValueError(f'Invalid list id: {list_id}') + return List(self, list_data_[0]) + + async def get_list_tweets( + self, list_id: str, count: int = 20, cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves tweets from a list. + + Parameters + ---------- + list_id : :class:`str` + The ID of the list to retrieve tweets from. + count : :class:`int`, default=20 + The number of tweets to retrieve. 
+ cursor : :class:`str`, default=None + The cursor for pagination. + + Returns + ------- + Result[:class:`Tweet`] + A Result object containing the retrieved tweets. + + Examples + -------- + >>> tweets = await client.get_list_tweets('list id') + >>> for tweet in tweets: + ... print(tweet) + + + ... + ... + + >>> more_tweets = await tweets.next() # Retrieve more tweets + >>> for tweet in more_tweets: + ... print(tweet) + + + ... + ... + """ + response, _ = await self.gql.list_latest_tweets_timeline(list_id, count, cursor) + + items_ = find_dict(response, 'entries', find_one=True) + if not items_: + raise ValueError(f'Invalid list id: {list_id}') + items = items_[0] + next_cursor = items[-1]['content']['value'] + + results = [] + for item in items: + if not item['entryId'].startswith('tweet'): + continue + + tweet = tweet_from_data(self, item) + if tweet is not None: + results.append(tweet) + + return Result( + results, + partial(self.get_list_tweets, list_id, count, next_cursor), + next_cursor + ) + + async def _get_list_users(self, f: str, list_id: str, count: int, cursor: str) -> Result[User]: + """ + Base function to retrieve the users associated with a list. + """ + response, _ = await f(list_id, count, cursor) + + items = find_dict(response, 'entries', find_one=True)[0] + results = [] + for item in items: + entry_id = item['entryId'] + if entry_id.startswith('user'): + user_info = find_dict(item, 'result', find_one=True)[0] + results.append(User(self, user_info)) + elif entry_id.startswith('cursor-bottom'): + next_cursor = item['content']['value'] + break + + return Result( + results, + partial(self._get_list_users, f, list_id, count, next_cursor), + next_cursor + ) + + async def get_list_members( + self, list_id: str, count: int = 20, cursor: str | None = None + ) -> Result[User]: + """Retrieves members of a list. + + Parameters + ---------- + list_id : :class:`str` + List ID. + count : int, default=20 + Number of members to retrieve. 
+ + Returns + ------- + Result[:class:`User`] + Members of a list + + Examples + -------- + >>> members = client.get_list_members(123456789) + >>> for member in members: + ... print(member) + + + ... + ... + >>> more_members = members.next() # Retrieve more members + """ + return await self._get_list_users(self.gql.list_members, list_id, count, cursor) + + async def get_list_subscribers( + self, list_id: str, count: int = 20, cursor: str | None = None + ) -> Result[User]: + """Retrieves subscribers of a list. + + Parameters + ---------- + list_id : :class:`str` + List ID. + count : :class:`int`, default=20 + Number of subscribers to retrieve. + + Returns + ------- + Result[:class:`User`] + Subscribers of a list + + Examples + -------- + >>> members = client.get_list_subscribers(123456789) + >>> for subscriber in subscribers: + ... print(subscriber) + + + ... + ... + >>> more_subscribers = members.next() # Retrieve more subscribers + """ + return await self._get_list_users(self.gql.list_subscribers, list_id, count, cursor) + + async def search_list( + self, query: str, count: int = 20, cursor: str | None = None + ) -> Result[List]: + """ + Search for lists based on the provided query. + + Parameters + ---------- + query : :class:`str` + The search query. + count : :class:`int`, default=20 + The number of lists to retrieve. + + Returns + ------- + Result[:class:`List`] + An instance of the `Result` class containing the + search results. + + Examples + -------- + >>> lists = await client.search_list('query') + >>> for list in lists: + ... print(list) + + + ... 
+ + >>> more_lists = await lists.next() # Retrieve more lists + """ + response, _ = await self.gql.search_timeline(query, 'Lists', count, cursor) + entries = find_dict(response, 'entries', find_one=True)[0] + + if cursor is None: + items = entries[0]['content']['items'] + else: + items = find_dict(response, 'moduleItems', find_one=True)[0] + + lists = [] + for item in items: + lists.append(List(self, item['item']['itemContent']['list'])) + next_cursor = entries[-1]['content']['value'] + + return Result( + lists, + partial(self.search_list, query, count, next_cursor), + next_cursor + ) + + async def get_notifications( + self, + type: Literal['All', 'Verified', 'Mentions'], + count: int = 40, + cursor: str | None = None + ) -> Result[Notification]: + """ + Retrieve notifications based on the provided type. + + Parameters + ---------- + type : {'All', 'Verified', 'Mentions'} + Type of notifications to retrieve. + All: All notifications + Verified: Notifications relating to authenticated users + Mentions: Notifications with mentions + count : :class:`int`, default=40 + Number of notifications to retrieve. + + Returns + ------- + Result[:class:`Notification`] + List of retrieved notifications. + + Examples + -------- + >>> notifications = await client.get_notifications('All') + >>> for notification in notifications: + ... print(notification) + + + ... + ... 
+ + >>> # Retrieve more notifications + >>> more_notifications = await notifications.next() + """ + type = type.capitalize() + f = { + 'All': self.v11.notifications_all, + 'Verified': self.v11.notifications_verified, + 'Mentions': self.v11.notifications_mentions + }[type] + response, _ = await f(count, cursor) + + global_objects = response['globalObjects'] + users = { + id: User(self, build_user_data(data)) + for id, data in global_objects.get('users', {}).items() + } + tweets = {} + + for id, tweet_data in global_objects.get('tweets', {}).items(): + user_id = tweet_data['user_id_str'] + user = users[user_id] + tweet = Tweet(self, build_tweet_data(tweet_data), user) + tweets[id] = tweet + + notifications = [] + + for notification in global_objects.get('notifications', {}).values(): + user_actions = notification['template']['aggregateUserActionsV1'] + target_objects = user_actions['targetObjects'] + if target_objects and 'tweet' in target_objects[0]: + tweet_id = target_objects[0]['tweet']['id'] + tweet = tweets[tweet_id] + else: + tweet = None + + from_users = user_actions['fromUsers'] + if from_users and 'user' in from_users[0]: + user_id = from_users[0]['user']['id'] + user = users[user_id] + else: + user = None + + notifications.append(Notification(self, notification, tweet, user)) + + entries = find_dict(response, 'entries', find_one=True)[0] + cursor_bottom_entry = [ + i for i in entries + if i['entryId'].startswith('cursor-bottom') + ] + if cursor_bottom_entry: + next_cursor = find_dict(cursor_bottom_entry[0], 'value', find_one=True)[0] + else: + next_cursor = None + + return Result( + notifications, + partial(self.get_notifications, type, count, next_cursor), + next_cursor + ) + + async def search_community( + self, query: str, cursor: str | None = None + ) -> Result[Community]: + """ + Searchs communities based on the specified query. + + Parameters + ---------- + query : :class:`str` + The search query. 
+ + Returns + ------- + Result[:class:`Community`] + List of retrieved communities. + + Examples + -------- + >>> communities = await client.search_communities('query') + >>> for community in communities: + ... print(community) + + + ... + + >>> # Retrieve more communities + >>> more_communities = await communities.next() + """ + response, _ = await self.gql.search_community(query, cursor) + + items = find_dict(response, 'items_results', find_one=True)[0] + communities = [] + for item in items: + communities.append(Community(self, item['result'])) + next_cursor_ = find_dict(response, 'next_cursor', find_one=True) + next_cursor = next_cursor_[0] if next_cursor_ else None + if next_cursor is None: + fetch_next_result = None + else: + fetch_next_result = partial(self.search_community, query, next_cursor) + return Result( + communities, + fetch_next_result, + next_cursor + ) + + async def get_community(self, community_id: str) -> Community: + """ + Retrieves community by ID. + + Parameters + ---------- + list_id : :class:`str` + The ID of the community to retrieve. + + Returns + ------- + :class:`Community` + Community object. + """ + response, _ = await self.gql.community_query(community_id) + community_data = find_dict(response, 'result', find_one=True)[0] + return Community(self, community_data) + + async def get_community_tweets( + self, + community_id: str, + tweet_type: Literal['Top', 'Latest', 'Media'], + count: int = 40, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves tweets from a community. + + Parameters + ---------- + community_id : :class:`str` + The ID of the community. + tweet_type : {'Top', 'Latest', 'Media'} + The type of tweets to retrieve. + count : :class:`int`, default=40 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`Tweet`] + List of retrieved tweets. + + Examples + -------- + >>> community_id = '...' 
+ >>> tweets = await client.get_community_tweets(community_id, 'Latest') + >>> for tweet in tweets: + ... print(tweet) + + + ... + >>> more_tweets = await tweets.next() # Retrieve more tweets + """ + if tweet_type == 'Media': + response, _ = await self.gql.community_media_timeline(community_id, count, cursor) + elif tweet_type == 'Top': + response, _ = await self.gql.community_tweets_timeline(community_id, 'Relevance', count, cursor) + elif tweet_type == 'Latest': + response, _ = await self.gql.community_tweets_timeline(community_id, 'Recency', count, cursor) + else: + raise ValueError(f'Invalid tweet_type: {tweet_type}') + + entries = find_dict(response, 'entries', find_one=True)[0] + if tweet_type == 'Media': + if cursor is None: + items = entries[0]['content']['items'] + next_cursor = entries[-1]['content']['value'] + previous_cursor = entries[-2]['content']['value'] + else: + items = find_dict(response, 'moduleItems', find_one=True)[0] + next_cursor = entries[-1]['content']['value'] + previous_cursor = entries[-2]['content']['value'] + else: + items = entries + next_cursor = items[-1]['content']['value'] + previous_cursor = items[-2]['content']['value'] + + tweets = [] + for item in items: + if not item['entryId'].startswith(('tweet', 'communities-grid')): + continue + + tweet = tweet_from_data(self, item) + if tweet is not None: + tweets.append(tweet) + + return Result( + tweets, + partial(self.get_community_tweets, community_id, tweet_type, count, next_cursor), + next_cursor, + partial(self.get_community_tweets, community_id, tweet_type, count, previous_cursor), + previous_cursor + ) + + async def get_communities_timeline( + self, count: int = 20, cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves tweets from communities timeline. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`Tweet`] + List of retrieved tweets. 
+ + Examples + -------- + >>> tweets = await client.get_communities_timeline() + >>> for tweet in tweets: + ... print(tweet) + + + ... + >>> more_tweets = await tweets.next() # Retrieve more tweets + """ + response, _ = await self.gql.communities_main_page_timeline(count, cursor) + items = find_dict(response, 'entries', find_one=True)[0] + tweets = [] + for item in items: + if not item['entryId'].startswith('tweet'): + continue + tweet_data = find_dict(item, 'result', find_one=True)[0] + if 'tweet' in tweet_data: + tweet_data = tweet_data['tweet'] + user_data = tweet_data['core']['user_results']['result'] + community_data = tweet_data['community_results']['result'] + community_data['rest_id'] = community_data['id_str'] + community = Community(self, community_data) + tweet = Tweet(self, tweet_data, User(self, user_data)) + tweet.community = community + tweets.append(tweet) + + next_cursor = items[-1]['content']['value'] + previous_cursor = items[-2]['content']['value'] + + return Result( + tweets, + partial(self.get_communities_timeline, count, next_cursor), + next_cursor, + partial(self.get_communities_timeline, count, previous_cursor), + previous_cursor + ) + + async def join_community(self, community_id: str) -> Community: + """ + Join a community. + + Parameters + ---------- + community_id : :class:`str` + The ID of the community to join. + + Returns + ------- + :class:`Community` + The joined community. + """ + response, _ = await self.gql.join_community(community_id) + community_data = response['data']['community_join'] + community_data['rest_id'] = community_data['id_str'] + return Community(self, community_data) + + async def leave_community(self, community_id: str) -> Community: + """ + Leave a community. + + Parameters + ---------- + community_id : :class:`str` + The ID of the community to leave. + + Returns + ------- + :class:`Community` + The left community. 
+ """ + response, _ = await self.gql.leave_community(community_id) + community_data = response['data']['community_leave'] + community_data['rest_id'] = community_data['id_str'] + return Community(self, community_data) + + async def request_to_join_community( + self, community_id: str, answer: str | None = None + ) -> Community: + """ + Request to join a community. + + Parameters + ---------- + community_id : :class:`str` + The ID of the community to request to join. + answer : :class:`str`, default=None + The answer to the join request. + + Returns + ------- + :class:`Community` + The requested community. + """ + response, _ = await self.gql.request_to_join_community(community_id, answer) + community_data = find_dict(response, 'result', find_one=True)[0] + community_data['rest_id'] = community_data['id_str'] + return Community(self, community_data) + + async def _get_community_users(self, f, community_id: str, count: int, cursor: str | None): + """ + Base function to retrieve community users. + """ + response, _ = await f(community_id, count, cursor) + + items = find_dict(response, 'items_results', find_one=True)[0] + users = [] + for item in items: + if 'result' not in item: + continue + if item['result'].get('__typename') != 'User': + continue + users.append(CommunityMember(self, item['result'])) + + next_cursor_ = find_dict(response, 'next_cursor', find_one=True) + next_cursor = next_cursor_[0] if next_cursor_ else None + + if next_cursor is None: + fetch_next_result = None + else: + fetch_next_result = partial(self._get_community_users, f, community_id, count, next_cursor) + return Result( + users, + fetch_next_result, + next_cursor + ) + + async def get_community_members( + self, community_id: str, count: int = 20, cursor: str | None = None + ) -> Result[CommunityMember]: + """ + Retrieves members of a community. + + Parameters + ---------- + community_id : :class:`str` + The ID of the community. 
+        count : :class:`int`, default=20
+            The number of members to retrieve.
+
+        Returns
+        -------
+        Result[:class:`CommunityMember`]
+            List of retrieved members.
+        """
+        return await self._get_community_users(
+            self.gql.members_slice_timeline_query, community_id, count, cursor
+        )
+
+    async def get_community_moderators(
+        self, community_id: str, count: int = 20, cursor: str | None = None
+    ) -> Result[CommunityMember]:
+        """
+        Retrieves moderators of a community.
+
+        Parameters
+        ----------
+        community_id : :class:`str`
+            The ID of the community.
+        count : :class:`int`, default=20
+            The number of moderators to retrieve.
+
+        Returns
+        -------
+        Result[:class:`CommunityMember`]
+            List of retrieved moderators.
+        """
+        return await self._get_community_users(
+            self.gql.moderators_slice_timeline_query, community_id, count, cursor
+        )
+
+    async def search_community_tweet(
+        self,
+        community_id: str,
+        query: str,
+        count: int = 20,
+        cursor: str | None = None
+    ) -> Result[Tweet]:
+        """Searches tweets in a community.
+
+        Parameters
+        ----------
+        community_id : :class:`str`
+            The ID of the community.
+        query : :class:`str`
+            The search query.
+        count : :class:`int`, default=20
+            The number of tweets to retrieve.
+
+        Returns
+        -------
+        Result[:class:`Tweet`]
+            List of retrieved tweets.
+ """ + response, _ = await self.gql.community_tweet_search_module_query(community_id, query, count, cursor) + + items = find_dict(response, 'entries', find_one=True)[0] + tweets = [] + for item in items: + if not item['entryId'].startswith('tweet'): + continue + + tweet = tweet_from_data(self, item) + if tweet is not None: + tweets.append(tweet) + + next_cursor = items[-1]['content']['value'] + previous_cursor = items[-2]['content']['value'] + + return Result( + tweets, + partial(self.search_community_tweet, community_id, query, count, next_cursor), + next_cursor, + partial(self.search_community_tweet, community_id, query, count, previous_cursor), + previous_cursor, + ) + + async def _stream(self, topics: set[str]) -> AsyncGenerator[tuple[str, Payload]]: + url = f'https://api.{DOMAIN}/live_pipeline/events' + params = {'topics': ','.join(topics)} + headers = self._base_headers + headers.pop('content-type') + + async with self.http.stream('GET', url, params=params, headers=headers, timeout=None) as response: + self._remove_duplicate_ct0_cookie() + async for line in response.aiter_lines(): + try: + data = json.loads(line) + except json.JSONDecodeError: + continue + payload = _payload_from_data(data['payload']) + yield data.get('topic'), payload + + async def get_streaming_session( + self, topics: set[str], auto_reconnect: bool = True + ) -> StreamingSession: + """ + Returns a session for interacting with the streaming API. + + Parameters + ---------- + topics : set[:class:`str`] + The set of topics to stream. + Topics can be generated using :class:`.Topic`. + auto_reconnect : :class:`bool`, default=True + Whether to automatically reconnect when disconnected. + + Returns + ------- + :class:`.StreamingSession` + A stream session instance. + + Examples + -------- + >>> from twikit.streaming import Topic + >>> + >>> topics = { + ... Topic.tweet_engagement('1739617652'), # Stream tweet engagement + ... Topic.dm_update('17544932482-174455537996'), # Stream DM update + ... 
Topic.dm_typing('17544932482-174455537996') # Stream DM typing + ... } + >>> session = await client.get_streaming_session(topics) + >>> + >>> async for topic, payload in session: + ... if payload.dm_update: + ... conversation_id = payload.dm_update.conversation_id + ... user_id = payload.dm_update.user_id + ... print(f'{conversation_id}: {user_id} sent a message') + >>> + >>> if payload.dm_typing: + ... conversation_id = payload.dm_typing.conversation_id + ... user_id = payload.dm_typing.user_id + ... print(f'{conversation_id}: {user_id} is typing') + >>> + >>> if payload.tweet_engagement: + ... like = payload.tweet_engagement.like_count + ... retweet = payload.tweet_engagement.retweet_count + ... view = payload.tweet_engagement.view_count + ... print('Tweet engagement updated:' + ... f'likes: {like} retweets: {retweet} views: {view}') + + Topics to stream can be added or deleted using + :attr:`.StreamingSession.update_subscriptions` method. + + >>> subscribe_topics = { + ... Topic.tweet_engagement('1749528513'), + ... Topic.tweet_engagement('1765829534') + ... } + >>> unsubscribe_topics = { + ... Topic.tweet_engagement('1739617652'), + ... Topic.dm_update('17544932482-174455537996'), + ... Topic.dm_update('17544932482-174455537996') + ... } + >>> await session.update_subscriptions( + ... subscribe_topics, unsubscribe_topics + ... 
) + + See Also + -------- + .StreamingSession + .StreamingSession.update_subscriptions + .Payload + .Topic + """ + stream = self._stream(topics) + session_id = (await anext(stream))[1].config.session_id + return StreamingSession(self, session_id, stream, topics, auto_reconnect) + + async def _update_subscriptions( + self, + session: StreamingSession, + subscribe: set[str] | None = None, + unsubscribe: set[str] | None = None + ) -> Payload: + if subscribe is None: + subscribe = set() + if unsubscribe is None: + unsubscribe = set() + + response, _ = await self.v11.live_pipeline_update_subscriptions( + session.id, ','.join(subscribe), ','.join(unsubscribe) + ) + session.topics |= subscribe + session.topics -= unsubscribe + + return _payload_from_data(response) + + async def _get_user_state(self) -> Literal['normal', 'bounced', 'suspended']: + response, _ = await self.v11.user_state() + return response['userState'] diff --git a/build/lib/twikit/client/gql.py b/build/lib/twikit/client/gql.py new file mode 100644 index 00000000..54d7d570 --- /dev/null +++ b/build/lib/twikit/client/gql.py @@ -0,0 +1,705 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from ..constants import ( + DOMAIN, + BOOKMARK_FOLDER_TIMELINE_FEATURES, + COMMUNITY_NOTE_FEATURES, + COMMUNITY_TWEETS_FEATURES, + FEATURES, + JOIN_COMMUNITY_FEATURES, + LIST_FEATURES, + NOTE_TWEET_FEATURES, + SIMILAR_POSTS_FEATURES, + TWEET_RESULT_BY_REST_ID_FEATURES, + TWEET_RESULTS_BY_REST_IDS_FEATURES, + USER_FEATURES, + USER_HIGHLIGHTS_TWEETS_FEATURES +) +from ..utils import flatten_params, get_query_id + +if TYPE_CHECKING: + from ..guest.client import GuestClient + from .client import Client + + ClientType = Client | GuestClient + + +class Endpoint: + @staticmethod + def url(path): + return f'https://{DOMAIN}/i/api/graphql/{path}' + + SEARCH_TIMELINE = url('flaR-PUMshxFWZWPNpq4zA/SearchTimeline') + SIMILAR_POSTS = url('EToazR74i0rJyZYalfVEAQ/SimilarPosts') + CREATE_NOTE_TWEET = 
url('iCUB42lIfXf9qPKctjE5rQ/CreateNoteTweet') + CREATE_TWEET = url('SiM_cAu83R0wnrpmKQQSEw/CreateTweet') + CREATE_SCHEDULED_TWEET = url('LCVzRQGxOaGnOnYH01NQXg/CreateScheduledTweet') + DELETE_TWEET = url('VaenaVgh5q5ih7kvyVjgtg/DeleteTweet') + USER_BY_SCREEN_NAME = url('NimuplG1OB7Fd2btCLdBOw/UserByScreenName') + USER_BY_REST_ID = url('tD8zKvQzwY3kdx5yz6YmOw/UserByRestId') + TWEET_DETAIL = url('U0HTv-bAWTBYylwEMT7x5A/TweetDetail') + TWEET_RESULT_BY_REST_ID = url('Xl5pC_lBk_gcO2ItU39DQw/TweetResultByRestId') + FETCH_SCHEDULED_TWEETS = url('ITtjAzvlZni2wWXwf295Qg/FetchScheduledTweets') + DELETE_SCHEDULED_TWEET = url('CTOVqej0JBXAZSwkp1US0g/DeleteScheduledTweet') + RETWEETERS = url('X-XEqG5qHQSAwmvy00xfyQ/Retweeters') + FAVORITERS = url('LLkw5EcVutJL6y-2gkz22A/Favoriters') + FETCH_COMMUNITY_NOTE = url('fKWPPj271aTM-AB9Xp48IA/BirdwatchFetchOneNote') + USER_TWEETS = url('QWF3SzpHmykQHsQMixG0cg/UserTweets') + USER_TWEETS_AND_REPLIES = url('vMkJyzx1wdmvOeeNG0n6Wg/UserTweetsAndReplies') + USER_MEDIA = url('2tLOJWwGuCTytDrGBg8VwQ/UserMedia') + USER_LIKES = url('IohM3gxQHfvWePH5E3KuNA/Likes') + USER_HIGHLIGHTS_TWEETS = url('tHFm_XZc_NNi-CfUThwbNw/UserHighlightsTweets') + HOME_TIMELINE = url('-X_hcgQzmHGl29-UXxz4sw/HomeTimeline') + HOME_LATEST_TIMELINE = url('U0cdisy7QFIoTfu3-Okw0A/HomeLatestTimeline') + FAVORITE_TWEET = url('lI07N6Otwv1PhnEgXILM7A/FavoriteTweet') + UNFAVORITE_TWEET = url('ZYKSe-w7KEslx3JhSIk5LA/UnfavoriteTweet') + CREATE_RETWEET = url('ojPdsZsimiJrUGLR1sjUtA/CreateRetweet') + DELETE_RETWEET = url('iQtK4dl5hBmXewYZuEOKVw/DeleteRetweet') + CREATE_BOOKMARK = url('aoDbu3RHznuiSkQ9aNM67Q/CreateBookmark') + BOOKMARK_TO_FOLDER = url('4KHZvvNbHNf07bsgnL9gWA/bookmarkTweetToFolder') + DELETE_BOOKMARK = url('Wlmlj2-xzyS1GN3a6cj-mQ/DeleteBookmark') + BOOKMARKS = url('qToeLeMs43Q8cr7tRYXmaQ/Bookmarks') + BOOKMARK_FOLDER_TIMELINE = url('8HoabOvl7jl9IC1Aixj-vg/BookmarkFolderTimeline') + BOOKMARKS_ALL_DELETE = url('skiACZKC1GDYli-M8RzEPQ/BookmarksAllDelete') + 
BOOKMARK_FOLDERS_SLICE = url('i78YDd0Tza-dV4SYs58kRg/BookmarkFoldersSlice') + EDIT_BOOKMARK_FOLDER = url('a6kPp1cS1Dgbsjhapz1PNw/EditBookmarkFolder') + DELETE_BOOKMARK_FOLDER = url('2UTTsO-6zs93XqlEUZPsSg/DeleteBookmarkFolder') + CREATE_BOOKMARK_FOLDER = url('6Xxqpq8TM_CREYiuof_h5w/createBookmarkFolder') + FOLLOWERS = url('gC_lyAxZOptAMLCJX5UhWw/Followers') + BLUE_VERIFIED_FOLLOWERS = url('VmIlPJNEDVQ29HfzIhV4mw/BlueVerifiedFollowers') + FOLLOWERS_YOU_KNOW = url('f2tbuGNjfOE8mNUO5itMew/FollowersYouKnow') + FOLLOWING = url('2vUj-_Ek-UmBVDNtd8OnQA/Following') + USER_CREATOR_SUBSCRIPTIONS = url('Wsm5ZTCYtg2eH7mXAXPIgw/UserCreatorSubscriptions') + USER_DM_REACTION_MUTATION_ADD_MUTATION = url('VyDyV9pC2oZEj6g52hgnhA/useDMReactionMutationAddMutation') + USER_DM_REACTION_MUTATION_REMOVE_MUTATION = url('bV_Nim3RYHsaJwMkTXJ6ew/useDMReactionMutationRemoveMutation') + DM_MESSAGE_DELETE_MUTATION = url('BJ6DtxA2llfjnRoRjaiIiw/DMMessageDeleteMutation') + ADD_PARTICIPANTS_MUTATION = url('oBwyQ0_xVbAQ8FAyG0pCRA/AddParticipantsMutation') + CREATE_LIST = url('EYg7JZU3A1eJ-wr2eygPHQ/CreateList') + EDIT_LIST_BANNER = url('t_DsROHldculsB0B9BUAWw/EditListBanner') + DELETE_LIST_BANNER = url('Y90WuxdWugtMRJhkXTdvzg/DeleteListBanner') + UPDATE_LIST = url('dIEI1sbSAuZlxhE0ggrezA/UpdateList') + LIST_ADD_MEMBER = url('lLNsL7mW6gSEQG6rXP7TNw/ListAddMember') + LIST_REMOVE_MEMBER = url('cvDFkG5WjcXV0Qw5nfe1qQ/ListRemoveMember') + LIST_MANAGEMENT_PACE_TIMELINE = url('47170qwZCt5aFo9cBwFoNA/ListsManagementPageTimeline') + LIST_BY_REST_ID = url('9hbYpeVBMq8-yB8slayGWQ/ListByRestId') + LIST_LATEST_TWEETS_TIMELINE = url('HjsWc-nwwHKYwHenbHm-tw/ListLatestTweetsTimeline') + LIST_MEMBERS = url('BQp2IEYkgxuSxqbTAr1e1g/ListMembers') + LIST_SUBSCRIBERS = url('74wGEkaBxrdoXakWTWMxRQ/ListSubscribers') + SEARCH_COMMUNITY = url('daVUkhfHn7-Z8llpYVKJSw/CommunitiesSearchQuery') + COMMUNITY_QUERY = url('lUBKrilodgg9Nikaw3cIiA/CommunityQuery') + COMMUNITY_MEDIA_TIMELINE = 
url('Ht5K2ckaZYAOuRFmFfbHig/CommunityMediaTimeline') + COMMUNITY_TWEETS_TIMELINE = url('mhwSsmub4JZgHcs0dtsjrw/CommunityTweetsTimeline') + COMMUNITIES_MAIN_PAGE_TIMELINE = url('4-4iuIdaLPpmxKnA3mr2LA/CommunitiesMainPageTimeline') + JOIN_COMMUNITY = url('xZQLbDwbI585YTG0QIpokw/JoinCommunity') + LEAVE_COMMUNITY = url('OoS6Kd4-noNLXPZYHtygeA/LeaveCommunity') + REQUEST_TO_JOIN_COMMUNITY = url('XwWChphD_6g7JnsFus2f2Q/RequestToJoinCommunity') + MEMBERS_SLICE_TIMELINE_QUERY = url('KDAssJ5lafCy-asH4wm1dw/membersSliceTimeline_Query') + MODERATORS_SLICE_TIMELINE_QUERY = url('9KI_r8e-tgp3--N5SZYVjg/moderatorsSliceTimeline_Query') + COMMUNITY_TWEET_SEARCH_MODULE_QUERY = url('5341rmzzvdjqfmPKfoHUBw/CommunityTweetSearchModuleQuery') + TWEET_RESULTS_BY_REST_IDS = url('PTN9HhBAlpoCTHfspDgqLA/TweetResultsByRestIds') + + +class GQLClient: + def __init__(self, base: ClientType) -> None: + self.base = base + + async def gql_get( + self, + url: str, + variables: dict, + features: dict | None = None, + headers: dict | None = None, + extra_params: dict | None = None, + **kwargs + ): + params = {'variables': variables} + if features is not None: + params['features'] = features + if extra_params is not None: + params |= extra_params + if headers is None: + headers = self.base._base_headers + return await self.base.get(url, params=flatten_params(params), headers=headers, **kwargs) + + async def gql_post( + self, + url: str, + variables: dict, + features: dict | None = None, + headers: dict | None = None, + extra_data: dict | None = None, + **kwargs + ): + data = {'variables': variables, 'queryId': get_query_id(url)} + if features is not None: + data['features'] = features + if extra_data is not None: + data |= extra_data + if headers is None: + headers = self.base._base_headers + return await self.base.post(url, json=data, headers=headers, **kwargs) + + async def search_timeline( + self, + query: str, + product: str, + count: int, + cursor: str | None + ): + variables = { + 'rawQuery': 
query, + 'count': count, + 'querySource': 'typed_query', + 'product': product + } + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_get(Endpoint.SEARCH_TIMELINE, variables, FEATURES) + + async def similar_posts(self, tweet_id: str): + variables = {'tweet_id': tweet_id} + return await self.gql_get( + Endpoint.SIMILAR_POSTS, + variables, + SIMILAR_POSTS_FEATURES + ) + + async def create_tweet( + self, is_note_tweet, text, media_entities, + poll_uri, reply_to, attachment_url, + community_id, share_with_followers, + richtext_options, edit_tweet_id, limit_mode + ): + variables = { + 'tweet_text': text, + 'dark_request': False, + 'media': { + 'media_entities': media_entities, + 'possibly_sensitive': False + }, + 'semantic_annotation_ids': [], + } + + if poll_uri is not None: + variables['card_uri'] = poll_uri + + if reply_to is not None: + variables['reply'] = { + 'in_reply_to_tweet_id': reply_to, + 'exclude_reply_user_ids': [] + } + + if limit_mode is not None: + variables['conversation_control'] = {'mode': limit_mode} + + if attachment_url is not None: + variables['attachment_url'] = attachment_url + + if community_id is not None: + variables['semantic_annotation_ids'] = [{ + 'entity_id': community_id, + 'group_id': '8', + 'domain_id': '31' + }] + variables['broadcast'] = share_with_followers + + if richtext_options is not None: + is_note_tweet = True + variables['richtext_options'] = { + 'richtext_tags': richtext_options + } + if edit_tweet_id is not None: + variables['edit_options'] = { + 'previous_tweet_id': edit_tweet_id + } + + if is_note_tweet: + endpoint = Endpoint.CREATE_NOTE_TWEET + features = NOTE_TWEET_FEATURES + else: + endpoint = Endpoint.CREATE_TWEET + features = FEATURES + return await self.gql_post(endpoint, variables, features) + + async def create_scheduled_tweet(self, scheduled_at, text, media_ids) -> str: + variables = { + 'post_tweet_request': { + 'auto_populate_reply_metadata': False, + 'status': text, + 
'exclude_reply_user_ids': [], + 'media_ids': media_ids + }, + 'execute_at': scheduled_at + } + return await self.gql_post(Endpoint.CREATE_SCHEDULED_TWEET, variables) + + async def delete_tweet(self, tweet_id): + variables = { + 'tweet_id': tweet_id, + 'dark_request': False + } + return await self.gql_post(Endpoint.DELETE_TWEET, variables) + + async def user_by_screen_name(self, screen_name): + variables = { + 'screen_name': screen_name, + 'withSafetyModeUserFields': False + } + params = { + 'fieldToggles': {'withAuxiliaryUserLabels': False} + } + return await self.gql_get(Endpoint.USER_BY_SCREEN_NAME, variables, USER_FEATURES, extra_params=params) + + async def user_by_rest_id(self, user_id): + variables = { + 'userId': user_id, + 'withSafetyModeUserFields': True + } + return await self.gql_get(Endpoint.USER_BY_REST_ID, variables, USER_FEATURES) + + async def tweet_detail(self, tweet_id, cursor): + variables = { + 'focalTweetId': tweet_id, + 'with_rux_injections': False, + 'includePromotedContent': True, + 'withCommunity': True, + 'withQuickPromoteEligibilityTweetFields': True, + 'withBirdwatchNotes': True, + 'withVoice': True, + 'withV2Timeline': True + } + if cursor is not None: + variables['cursor'] = cursor + params = { + 'fieldToggles': {'withAuxiliaryUserLabels': False} + } + return await self.gql_get(Endpoint.TWEET_DETAIL, variables, FEATURES, extra_params=params) + + async def fetch_scheduled_tweets(self): + variables = {'ascending': True} + return await self.gql_get(Endpoint.FETCH_SCHEDULED_TWEETS, variables) + + async def delete_scheduled_tweet(self, tweet_id): + variables = {'scheduled_tweet_id': tweet_id} + return await self.gql_post(Endpoint.DELETE_SCHEDULED_TWEET, variables) + + async def tweet_engagements(self, tweet_id, count, cursor, endpoint): + variables = { + 'tweetId': tweet_id, + 'count': count, + 'includePromotedContent': True + } + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_get(endpoint, variables, 
FEATURES) + + async def retweeters(self, tweet_id, count, cursor): + return await self.tweet_engagements(tweet_id, count, cursor, Endpoint.RETWEETERS) + + async def favoriters(self, tweet_id, count, cursor): + return await self.tweet_engagements(tweet_id, count, cursor, Endpoint.FAVORITERS) + + async def bird_watch_one_note(self, note_id): + variables = {'note_id': note_id} + return await self.gql_get(Endpoint.FETCH_COMMUNITY_NOTE, variables, COMMUNITY_NOTE_FEATURES) + + async def _get_user_tweets(self, user_id, count, cursor, endpoint): + variables = { + 'userId': user_id, + 'count': count, + 'includePromotedContent': True, + 'withQuickPromoteEligibilityTweetFields': True, + 'withVoice': True, + 'withV2Timeline': True + } + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_get(endpoint, variables, FEATURES) + + async def user_tweets(self, user_id, count, cursor): + return await self._get_user_tweets(user_id, count, cursor, Endpoint.USER_TWEETS) + + async def user_tweets_and_replies(self, user_id, count, cursor): + return await self._get_user_tweets(user_id, count, cursor, Endpoint.USER_TWEETS_AND_REPLIES) + + async def user_media(self, user_id, count, cursor): + return await self._get_user_tweets(user_id, count, cursor, Endpoint.USER_MEDIA) + + async def user_likes(self, user_id, count, cursor): + return await self._get_user_tweets(user_id, count, cursor, Endpoint.USER_LIKES) + + async def user_highlights_tweets(self, user_id, count, cursor): + variables = { + 'userId': user_id, + 'count': count, + 'includePromotedContent': True, + 'withVoice': True + } + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_get( + Endpoint.USER_HIGHLIGHTS_TWEETS, + variables, + USER_HIGHLIGHTS_TWEETS_FEATURES, + self.base._base_headers + ) + + async def home_timeline(self, count, seen_tweet_ids, cursor): + variables = { + 'count': count, + 'includePromotedContent': True, + 'latestControlAvailable': True, + 'requestContext': 
'launch', + 'withCommunity': True, + 'seenTweetIds': seen_tweet_ids or [] + } + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_post(Endpoint.HOME_TIMELINE, variables, FEATURES) + + async def home_latest_timeline(self, count, seen_tweet_ids, cursor): + variables = { + 'count': count, + 'includePromotedContent': True, + 'latestControlAvailable': True, + 'requestContext': 'launch', + 'withCommunity': True, + 'seenTweetIds': seen_tweet_ids or [] + } + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_post(Endpoint.HOME_LATEST_TIMELINE, variables, FEATURES) + + async def favorite_tweet(self, tweet_id): + variables = {'tweet_id': tweet_id} + return await self.gql_post(Endpoint.FAVORITE_TWEET, variables) + + async def unfavorite_tweet(self, tweet_id): + variables = {'tweet_id': tweet_id} + return await self.gql_post(Endpoint.UNFAVORITE_TWEET, variables) + + async def retweet(self, tweet_id): + variables = {'tweet_id': tweet_id, 'dark_request': False} + return await self.gql_post(Endpoint.CREATE_RETWEET, variables) + + async def delete_retweet(self, tweet_id): + variables = {'source_tweet_id': tweet_id,'dark_request': False} + return await self.gql_post(Endpoint.DELETE_RETWEET, variables) + + async def create_bookmark(self, tweet_id): + variables = {'tweet_id': tweet_id} + return await self.gql_post(Endpoint.CREATE_BOOKMARK, variables) + + async def bookmark_tweet_to_folder(self, tweet_id, folder_id): + variables = { + 'tweet_id': tweet_id, + 'bookmark_collection_id': folder_id + } + return await self.gql_post(Endpoint.BOOKMARK_TO_FOLDER, variables) + + async def delete_bookmark(self, tweet_id): + variables = {'tweet_id': tweet_id} + return await self.gql_post(Endpoint.DELETE_BOOKMARK, variables) + + async def bookmarks(self, count, cursor): + variables = { + 'count': count, + 'includePromotedContent': True + } + features = FEATURES | { + 'graphql_timeline_v2_bookmark_timeline': True + } + if cursor is not None: + 
variables['cursor'] = cursor + params = flatten_params({ + 'variables': variables, + 'features': features + }) + return await self.base.get( + Endpoint.BOOKMARKS, + params=params, + headers=self.base._base_headers + ) + + async def bookmark_folder_timeline(self, count, cursor, folder_id): + variables = { + 'count': count, + 'includePromotedContent': True, + 'bookmark_collection_id': folder_id + } + variables['bookmark_collection_id'] = folder_id + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_get(Endpoint.BOOKMARK_FOLDER_TIMELINE, variables, BOOKMARK_FOLDER_TIMELINE_FEATURES) + + async def delete_all_bookmarks(self): + return await self.gql_post(Endpoint.BOOKMARKS_ALL_DELETE, {}) + + async def bookmark_folders_slice(self, cursor): + variables = {} + if cursor is not None: + variables['cursor'] = cursor + variables = {'variables': variables} + return await self.gql_get(Endpoint.BOOKMARK_FOLDERS_SLICE, variables) + + async def edit_bookmark_folder(self, folder_id, name): + variables = { + 'bookmark_collection_id': folder_id, + 'name': name + } + return await self.gql_post(Endpoint.EDIT_BOOKMARK_FOLDER, variables) + + async def delete_bookmark_folder(self, folder_id): + variables = {'bookmark_collection_id': folder_id} + return await self.gql_post(Endpoint.DELETE_BOOKMARK_FOLDER, variables) + + async def create_bookmark_folder(self, name): + variables = {'name': name} + return await self.gql_post(Endpoint.CREATE_BOOKMARK_FOLDER, variables) + + async def _friendships(self, user_id, count, endpoint, cursor): + variables = { + 'userId': user_id, + 'count': count, + 'includePromotedContent': False + } + if cursor is not None: + variables['cursor'] = cursor + return await self.gql_get(endpoint, variables, FEATURES) + + async def followers(self, user_id, count, cursor): + return await self._friendships(user_id, count, Endpoint.FOLLOWERS, cursor) + + async def blue_verified_followers(self, user_id, count, cursor): + return await 
self._friendships(user_id, count, Endpoint.BLUE_VERIFIED_FOLLOWERS, cursor)

async def followers_you_know(self, user_id, count, cursor):
    """Paginated friendship query against Endpoint.FOLLOWERS_YOU_KNOW."""
    return await self._friendships(user_id, count, Endpoint.FOLLOWERS_YOU_KNOW, cursor)

async def following(self, user_id, count, cursor):
    """Paginated friendship query against Endpoint.FOLLOWING."""
    return await self._friendships(user_id, count, Endpoint.FOLLOWING, cursor)

async def user_creator_subscriptions(self, user_id, count, cursor):
    """Paginated friendship query against Endpoint.USER_CREATOR_SUBSCRIPTIONS."""
    return await self._friendships(user_id, count, Endpoint.USER_CREATOR_SUBSCRIPTIONS, cursor)

async def user_dm_reaction_mutation_add_mutation(self, message_id, conversation_id, emoji):
    """POST a mutation adding an emoji reaction to a DM message."""
    payload = {
        'messageId': message_id,
        'conversationId': conversation_id,
        'reactionTypes': ['Emoji'],
        'emojiReactions': [emoji],
    }
    return await self.gql_post(Endpoint.USER_DM_REACTION_MUTATION_ADD_MUTATION, payload)

async def user_dm_reaction_mutation_remove_mutation(self, message_id, conversation_id, emoji):
    """POST a mutation removing an emoji reaction from a DM message."""
    payload = {
        'conversationId': conversation_id,
        'messageId': message_id,
        'reactionTypes': ['Emoji'],
        'emojiReactions': [emoji],
    }
    return await self.gql_post(Endpoint.USER_DM_REACTION_MUTATION_REMOVE_MUTATION, payload)

async def dm_message_delete_mutation(self, message_id):
    """POST a mutation deleting a single DM message."""
    return await self.gql_post(Endpoint.DM_MESSAGE_DELETE_MUTATION, {'messageId': message_id})

async def add_participants_mutation(self, group_id, user_ids):
    """POST a mutation adding users to a group conversation."""
    payload = {'addedParticipants': user_ids, 'conversationId': group_id}
    return await self.gql_post(Endpoint.ADD_PARTICIPANTS_MUTATION, payload)

async def create_list(self, name, description, is_private):
    """POST a mutation creating a new list."""
    payload = {'isPrivate': is_private, 'name': name, 'description': description}
    return await self.gql_post(Endpoint.CREATE_LIST, payload, LIST_FEATURES)

async def edit_list_banner(self, list_id, media_id):
    """POST a mutation setting an uploaded media as the list banner."""
    payload = {'listId': list_id, 'mediaId': media_id}
    return await self.gql_post(Endpoint.EDIT_LIST_BANNER, payload, LIST_FEATURES)

async def delete_list_banner(self, list_id):
    """POST a mutation removing the banner of a list."""
    return await self.gql_post(Endpoint.DELETE_LIST_BANNER, {'listId': list_id}, LIST_FEATURES)

async def update_list(self, list_id, name, description, is_private):
    """POST a mutation updating list metadata; only non-None fields are sent."""
    payload = {'listId': list_id}
    # Same insertion order as before: name, description, isPrivate.
    for key, value in (('name', name), ('description', description), ('isPrivate', is_private)):
        if value is not None:
            payload[key] = value
    return await self.gql_post(Endpoint.UPDATE_LIST, payload, LIST_FEATURES)

async def list_add_member(self, list_id, user_id):
    """POST a mutation adding a user to a list."""
    payload = {'listId': list_id, 'userId': user_id}
    return await self.gql_post(Endpoint.LIST_ADD_MEMBER, payload, LIST_FEATURES)

async def list_remove_member(self, list_id, user_id):
    """POST a mutation removing a user from a list."""
    payload = {'listId': list_id, 'userId': user_id}
    return await self.gql_post(Endpoint.LIST_REMOVE_MEMBER, payload, LIST_FEATURES)

async def list_management_pace_timeline(self, count, cursor):
    """GET the list-management timeline, paginated by `cursor`."""
    payload = {'count': count}
    if cursor is not None:
        payload['cursor'] = cursor
    return await self.gql_get(Endpoint.LIST_MANAGEMENT_PACE_TIMELINE, payload, FEATURES)

async def list_by_rest_id(self, list_id):
    """GET a single list by its rest id."""
    return await self.gql_get(Endpoint.LIST_BY_REST_ID, {'listId': list_id}, LIST_FEATURES)

async def list_latest_tweets_timeline(self, list_id, count, cursor):
    """GET the latest tweets of a list, paginated by `cursor`."""
    payload = {'listId': list_id, 'count': count}
    if cursor is not None:
        payload['cursor'] = cursor
    return await self.gql_get(Endpoint.LIST_LATEST_TWEETS_TIMELINE, payload, FEATURES)

async def _list_users(self, endpoint, list_id, count, cursor):
    """Shared paginated GET for list member/subscriber endpoints."""
    payload = {'listId': list_id, 'count': count}
    if cursor is not None:
        payload['cursor'] = cursor
    return await self.gql_get(endpoint, payload, FEATURES)

async def list_members(self, list_id, count, cursor):
    """GET the members of a list."""
    return await self._list_users(Endpoint.LIST_MEMBERS, list_id, count, cursor)

async def list_subscribers(self, list_id, count, cursor):
    """GET the subscribers of a list."""
    return await self._list_users(Endpoint.LIST_SUBSCRIBERS, list_id, count, cursor)

async def search_community(self, query, cursor):
    """GET communities matching a free-text query (no feature flags sent)."""
    payload = {'query': query}
    if cursor is not None:
        payload['cursor'] = cursor
    return await self.gql_get(Endpoint.SEARCH_COMMUNITY, payload)

async def community_query(self, community_id):
    """GET a single community by id."""
    feature_flags = {
        'c9s_list_members_action_api_enabled': False,
        'c9s_superc9s_indication_enabled': False,
    }
    return await self.gql_get(Endpoint.COMMUNITY_QUERY, {'communityId': community_id}, feature_flags)

async def community_media_timeline(self, community_id, count, cursor):
    """GET the media timeline of a community, paginated by `cursor`."""
    payload = {'communityId': community_id, 'count': count, 'withCommunity': True}
    if cursor is not None:
        payload['cursor'] = cursor
    return await self.gql_get(Endpoint.COMMUNITY_MEDIA_TIMELINE, payload, COMMUNITY_TWEETS_FEATURES)

async def community_tweets_timeline(self, community_id, ranking_mode, count, cursor):
    """GET the tweet timeline of a community ranked by `ranking_mode`."""
    payload = {
        'communityId': community_id,
        'count': count,
        'withCommunity': True,
        'rankingMode': ranking_mode,
    }
    if cursor is not None:
        payload['cursor'] = cursor
    return await self.gql_get(Endpoint.COMMUNITY_TWEETS_TIMELINE, payload, COMMUNITY_TWEETS_FEATURES)

async def communities_main_page_timeline(self, count, cursor):
    """GET the communities landing-page timeline, paginated by `cursor`."""
    payload = {'count': count, 'withCommunity': True}
    if cursor is not None:
        payload['cursor'] = cursor
    return await self.gql_get(Endpoint.COMMUNITIES_MAIN_PAGE_TIMELINE, payload, COMMUNITY_TWEETS_FEATURES)

async def join_community(self, community_id):
    """POST a mutation joining a community."""
    return await self.gql_post(Endpoint.JOIN_COMMUNITY, {'communityId': community_id}, JOIN_COMMUNITY_FEATURES)

async def leave_community(self, community_id):
    """POST a mutation leaving a community."""
    return await self.gql_post(Endpoint.LEAVE_COMMUNITY, {'communityId': community_id}, JOIN_COMMUNITY_FEATURES)
async def request_to_join_community(self, community_id, answer):
    """Request membership in a restricted community.

    `answer` is the reply to the community's join question; None is sent
    as an empty string.
    """
    variables = {
        'communityId': community_id,
        'answer': '' if answer is None else answer
    }
    return await self.gql_post(Endpoint.REQUEST_TO_JOIN_COMMUNITY, variables, JOIN_COMMUNITY_FEATURES)

async def _get_community_users(self, endpoint, community_id, count, cursor):
    """Base function to get community members or moderators."""
    variables = {'communityId': community_id, 'count': count}
    features = {'responsive_web_graphql_timeline_navigation_enabled': True}
    if cursor is not None:
        variables['cursor'] = cursor
    return await self.gql_get(endpoint, variables, features)

async def members_slice_timeline_query(self, community_id, count, cursor):
    """Paginated community members listing."""
    return await self._get_community_users(Endpoint.MEMBERS_SLICE_TIMELINE_QUERY, community_id, count, cursor)

async def moderators_slice_timeline_query(self, community_id, count, cursor):
    """Paginated community moderators listing."""
    return await self._get_community_users(Endpoint.MODERATORS_SLICE_TIMELINE_QUERY, community_id, count, cursor)

async def community_tweet_search_module_query(self, community_id, query, count, cursor):
    """Search tweets inside a single community."""
    variables = {
        'count': count,
        'query': query,
        'communityId': community_id,
        'includePromotedContent': False,
        'withBirdwatchNotes': True,
        'withVoice': False,
        'isListMemberTargetUserId': '0',
        'withCommunity': False,
        'withSafetyModeUserFields': True
    }
    if cursor is not None:
        variables['cursor'] = cursor
    return await self.gql_get(Endpoint.COMMUNITY_TWEET_SEARCH_MODULE_QUERY, variables, COMMUNITY_TWEETS_FEATURES)

async def tweet_results_by_rest_ids(self, tweet_ids):
    """Bulk tweet lookup by a list of rest ids."""
    variables = {
        'tweetIds': tweet_ids,
        'includePromotedContent': True,
        'withBirdwatchNotes': True,
        'withVoice': True,
        'withCommunity': True
    }
    return await self.gql_get(Endpoint.TWEET_RESULTS_BY_REST_IDS, variables, TWEET_RESULTS_BY_REST_IDS_FEATURES)

####################
# For guest client
####################

async def tweet_result_by_rest_id(self, tweet_id):
    """Single tweet lookup usable without an authenticated session."""
    variables = {
        'tweetId': tweet_id,
        'withCommunity': False,
        'includePromotedContent': False,
        'withVoice': False
    }
    params = {
        'fieldToggles': {
            'withArticleRichContentState': True,
            'withArticlePlainText': False,
            'withGrokAnalyze': False
        }
    }
    return await self.gql_get(
        Endpoint.TWEET_RESULT_BY_REST_ID, variables, TWEET_RESULT_BY_REST_ID_FEATURES, extra_params=params
    )
diff --git a/build/lib/twikit/client/v11.py b/build/lib/twikit/client/v11.py
new file mode 100644
index 00000000..d4dbdda1
--- /dev/null
+++ b/build/lib/twikit/client/v11.py
@@ -0,0 +1,512 @@
from __future__ import annotations

import json
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from ..guest.client import GuestClient
    from .client import Client

    # Either client flavor can own a V11Client.
    ClientType = Client | GuestClient

from ..constants import DOMAIN

class Endpoint:
    """URL constants for the legacy (non-GraphQL) v1.1 / internal REST API."""
    GUEST_ACTIVATE = f'https://api.{DOMAIN}/1.1/guest/activate.json'
    ONBOARDING_SSO_INIT = f'https://api.{DOMAIN}/1.1/onboarding/sso_init.json'
    ACCOUNT_LOGOUT = f'https://api.{DOMAIN}/1.1/account/logout.json'
    ONBOARDING_TASK = f'https://api.{DOMAIN}/1.1/onboarding/task.json'
    SETTINGS = f'https://api.{DOMAIN}/1.1/account/settings.json'
    UPLOAD_MEDIA = f'https://upload.{DOMAIN}/i/media/upload.json'
    UPLOAD_MEDIA_2 = f'https://upload.{DOMAIN}/i/media/upload2.json'
    CREATE_MEDIA_METADATA = f'https://api.{DOMAIN}/1.1/media/metadata/create.json'
    CREATE_CARD = f'https://caps.{DOMAIN}/v2/cards/create.json'
    VOTE = f'https://caps.{DOMAIN}/v2/capi/passthrough/1'
    REVERSE_GEOCODE = f'https://api.{DOMAIN}/1.1/geo/reverse_geocode.json'
    SEARCH_GEO = f'https://api.{DOMAIN}/1.1/geo/search.json'
    # Contains a literal `{}` placeholder filled in with .format(place_id).
    GET_PLACE = f'https://api.{DOMAIN}/1.1/geo/id/{{}}.json'
    CREATE_FRIENDSHIPS = f'https://{DOMAIN}/i/api/1.1/friendships/create.json'
    DESTROY_FRIENDSHIPS = f'https://{DOMAIN}/i/api/1.1/friendships/destroy.json'
    CREATE_BLOCKS = f'https://{DOMAIN}/i/api/1.1/blocks/create.json'
    DESTROY_BLOCKS = f'https://{DOMAIN}/i/api/1.1/blocks/destroy.json'
    CREATE_MUTES =
f'https://{DOMAIN}/i/api/1.1/mutes/users/create.json' + DESTROY_MUTES = f'https://{DOMAIN}/i/api/1.1/mutes/users/destroy.json' + GUIDE = f'https://{DOMAIN}/i/api/2/guide.json' + AVAILABLE_TRENDS = f'https://api.{DOMAIN}/1.1/trends/available.json' + PLACE_TRENDS = f'https://api.{DOMAIN}/1.1/trends/place.json' + FOLLOWERS_LIST = f'https://api.{DOMAIN}/1.1/followers/list.json' + FRIENDS_LIST = f'https://api.{DOMAIN}/1.1/friends/list.json' + FOLLOWERS_IDS = f'https://api.{DOMAIN}/1.1/followers/ids.json' + FRIENDS_IDS = f'https://api.{DOMAIN}/1.1/friends/ids.json' + DM_NEW = f'https://{DOMAIN}/i/api/1.1/dm/new2.json' + DM_INBOX = f'https://{DOMAIN}/i/api/1.1/dm/inbox_initial_state.json' + DM_CONVERSATION = f'https://{DOMAIN}/i/api/1.1/dm/conversation/{{}}.json' + CONVERSATION_UPDATE_NAME = f'https://{DOMAIN}/i/api/1.1/dm/conversation/{{}}/update_name.json' + NOTIFICATIONS_ALL = f'https://{DOMAIN}/i/api/2/notifications/all.json' + NOTIFICATIONS_VERIFIED = f'https://{DOMAIN}/i/api/2/notifications/verified.json' + NOTIFICATIONS_MENTIONS = f'https://{DOMAIN}/i/api/2/notifications/mentions.json' + LIVE_PIPELINE_EVENTS = f'https://api.{DOMAIN}/live_pipeline/events' + LIVE_PIPELINE_UPDATE_SUBSCRIPTIONS = f'https://api.{DOMAIN}/1.1/live_pipeline/update_subscriptions' + USER_STATE = f'https://api.{DOMAIN}/help-center/forms/api/prod/user_state.json' + + +class V11Client: + def __init__(self, base: ClientType) -> None: + self.base = base + + async def guest_activate(self): + headers = self.base._base_headers + headers.pop('X-Twitter-Active-User', None) + headers.pop('X-Twitter-Auth-Type', None) + return await self.base.post( + Endpoint.GUEST_ACTIVATE, + headers=headers, + data={} + ) + + async def account_logout(self): + return await self.base.post( + Endpoint.ACCOUNT_LOGOUT, + headers=self.base._base_headers + ) + + async def onboarding_task(self, guest_token, token, subtask_inputs, data = None, **kwargs): + if data is None: + data = {} + if token is not None: + 
data['flow_token'] = token + if subtask_inputs is not None: + data['subtask_inputs'] = subtask_inputs + + headers = { + 'x-guest-token': guest_token, + 'Authorization': 'Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA' + } + + if self.base._get_csrf_token(): + headers["x-csrf-token"] = self.base._get_csrf_token() + headers["x-twitter-auth-type"] = "OAuth2Session" + + return await self.base.post( + Endpoint.ONBOARDING_TASK, + json=data, + headers=headers, + **kwargs + ) + + async def sso_init(self, provider, guest_token): + headers = self.base._base_headers | { + 'x-guest-token': guest_token + } + headers.pop('X-Twitter-Active-User') + headers.pop('X-Twitter-Auth-Type') + return await self.base.post( + Endpoint.ONBOARDING_SSO_INIT, + json={'provider': provider}, + headers=headers + ) + + async def settings(self): + return await self.base.get( + Endpoint.SETTINGS, + headers=self.base._base_headers + ) + + async def upload_media(self, method, is_long_video: bool, *args, **kwargs): + if is_long_video: + endpoint = Endpoint.UPLOAD_MEDIA_2 + else: + endpoint = Endpoint.UPLOAD_MEDIA + return await self.base.request(method, endpoint, *args, **kwargs) + + async def upload_media_init(self, media_type, total_bytes, media_category, is_long_video: bool): + params = { + 'command': 'INIT', + 'total_bytes': total_bytes, + 'media_type': media_type + } + if media_category is not None: + params['media_category'] = media_category + + return await self.upload_media( + 'POST', + is_long_video, + params=params, + headers=self.base._base_headers + ) + + async def upload_media_append(self, is_long_video, media_id, segment_index, chunk_stream): + params = { + 'command': 'APPEND', + 'media_id': media_id, + 'segment_index': segment_index, + } + headers = self.base._base_headers + headers.pop('content-type') + files = { + 'media': ( + 'blob', + chunk_stream, + 'application/octet-stream', + ) + } + return await self.upload_media( + 
'POST', + is_long_video, + params=params, + headers=headers, files=files + ) + + async def upload_media_finelize(self, is_long_video, media_id): + params = { + 'command': 'FINALIZE', + 'media_id': media_id, + } + return await self.upload_media( + 'POST', + is_long_video, + params=params, + headers=self.base._base_headers, + ) + + async def upload_media_status(self, is_long_video, media_id): + params = { + 'command': 'STATUS', + 'media_id': media_id, + } + return await self.upload_media( + 'GET', + is_long_video, + params=params, + headers=self.base._base_headers, + ) + + async def create_media_metadata(self, media_id, alt_text, sensitive_warning): + data = {'media_id': media_id} + if alt_text is not None: + data['alt_text'] = {'text': alt_text} + if sensitive_warning is not None: + data['sensitive_media_warning'] = sensitive_warning + return await self.base.post( + Endpoint.CREATE_MEDIA_METADATA, + json=data, + headers=self.base._base_headers + ) + + async def create_card(self, choices, duration_minutes): + card_data = { + 'twitter:card': f'poll{len(choices)}choice_text_only', + 'twitter:api:api:endpoint': '1', + 'twitter:long:duration_minutes': duration_minutes + } + + for i, choice in enumerate(choices, 1): + card_data[f'twitter:string:choice{i}_label'] = choice + + data = {'card_data': json.dumps(card_data)} + headers = self.base._base_headers | {'content-type': 'application/x-www-form-urlencoded'} + return await self.base.post( + Endpoint.CREATE_CARD, + data=data, + headers=headers, + ) + + async def vote(self, selected_choice: str, card_uri: str, tweet_id: str, card_name: str): + data = { + 'twitter:string:card_uri': card_uri, + 'twitter:long:original_tweet_id': tweet_id, + 'twitter:string:response_card_name': card_name, + 'twitter:string:cards_platform': 'Web-12', + 'twitter:string:selected_choice': selected_choice + } + headers = self.base._base_headers | { + 'content-type': 'application/x-www-form-urlencoded' + } + return await self.base.post( + 
Endpoint.VOTE, + data=data, + headers=headers + ) + + async def reverse_geocode(self, lat, long, accuracy, granularity, max_results): + params = { + 'lat': lat, + 'long': long, + 'accuracy': accuracy, + 'granularity': granularity, + 'max_results': max_results + } + for k, v in tuple(params.items()): + if v is None: + params.pop(k) + return await self.base.get( + Endpoint.REVERSE_GEOCODE, + params=params, + headers=self.base._base_headers + ) + + async def search_geo(self, lat, long, query, ip, granularity, max_results): + params = { + 'lat': lat, + 'long': long, + 'query': query, + 'ip': ip, + 'granularity': granularity, + 'max_results': max_results + } + for k, v in tuple(params.items()): + if v is None: + params.pop(k) + + return await self.base.get( + Endpoint.SEARCH_GEO, + params=params, + headers=self.base._base_headers + ) + + async def get_place(self, id): + return await self.base.get( + Endpoint.GET_PLACE.format(id), + headers=self.base._base_headers + ) + + async def create_friendships(self, user_id): + data = { + 'include_profile_interstitial_type': 1, + 'include_blocking': 1, + 'include_blocked_by': 1, + 'include_followed_by': 1, + 'include_want_retweets': 1, + 'include_mute_edge': 1, + 'include_can_dm': 1, + 'include_can_media_tag': 1, + 'include_ext_is_blue_verified': 1, + 'include_ext_verified_type': 1, + 'include_ext_profile_image_shape': 1, + 'skip_status': 1, + 'user_id': user_id + } + headers = self.base._base_headers | { + 'content-type': 'application/x-www-form-urlencoded' + } + return await self.base.post( + Endpoint.CREATE_FRIENDSHIPS, + data=data, + headers=headers + ) + + async def destroy_friendships(self, user_id): + data = { + 'include_profile_interstitial_type': 1, + 'include_blocking': 1, + 'include_blocked_by': 1, + 'include_followed_by': 1, + 'include_want_retweets': 1, + 'include_mute_edge': 1, + 'include_can_dm': 1, + 'include_can_media_tag': 1, + 'include_ext_is_blue_verified': 1, + 'include_ext_verified_type': 1, + 
'include_ext_profile_image_shape': 1, + 'skip_status': 1, + 'user_id': user_id + } + headers = self.base._base_headers | { + 'content-type': 'application/x-www-form-urlencoded' + } + return await self.base.post( + Endpoint.DESTROY_FRIENDSHIPS, + data=data, + headers=headers + ) + + async def create_blocks(self, user_id): + data = {'user_id': user_id} + headers = self.base._base_headers + headers['content-type'] = 'application/x-www-form-urlencoded' + return await self.base.post( + Endpoint.CREATE_BLOCKS, + data=data, + headers=headers + ) + + async def destroy_blocks(self, user_id): + data = {'user_id': user_id} + headers = self.base._base_headers + headers['content-type'] = 'application/x-www-form-urlencoded' + return await self.base.post( + Endpoint.DESTROY_BLOCKS, + data=data, + headers=headers + ) + + async def create_mutes(self, user_id): + data = {'user_id': user_id} + headers = self.base._base_headers + headers['content-type'] = 'application/x-www-form-urlencoded' + return await self.base.post( + Endpoint.CREATE_MUTES, + data=data, + headers=headers + ) + + async def destroy_mutes(self, user_id): + data = {'user_id': user_id} + headers = self.base._base_headers + headers['content-type'] = 'application/x-www-form-urlencoded' + return await self.base.post( + Endpoint.DESTROY_MUTES, + data=data, + headers=headers + ) + + async def guide(self, category, count, additional_request_params): + params = { + 'count': count, + 'include_page_configuration': True, + 'initial_tab_id': category + } + if additional_request_params is not None: + params |= additional_request_params + return await self.base.get( + Endpoint.GUIDE, + params=params, + headers=self.base._base_headers + ) + + async def available_trends(self): + return await self.base.get( + Endpoint.AVAILABLE_TRENDS, + headers=self.base._base_headers + ) + + async def place_trends(self, woeid): + return await self.base.get( + Endpoint.PLACE_TRENDS, + params={'id': woeid}, + headers=self.base._base_headers + ) + + 
async def _friendships(self, user_id, screen_name, count, endpoint, cursor): + params = {'count': count} + if user_id is not None: + params['user_id'] = user_id + elif screen_name is not None: + params['screen_name'] = screen_name + + if cursor is not None: + params['cursor'] = cursor + + return await self.base.get( + endpoint, + params=params, + headers=self.base._base_headers + ) + + async def followers_list(self, user_id, screen_name, count, cursor): + return await self._friendships(user_id, screen_name, count, Endpoint.FOLLOWERS_LIST, cursor) + + async def friends_list(self, user_id, screen_name, count, cursor): + return await self._friendships(user_id, screen_name, count, Endpoint.FRIENDS_LIST, cursor) + + async def _friendship_ids(self, user_id, screen_name, count, endpoint, cursor): + params = {'count': count} + if user_id is not None: + params['user_id'] = user_id + elif user_id is not None: + params['screen_name'] = screen_name + + if cursor is not None: + params['cursor'] = cursor + + return await self.base.get( + endpoint, + params=params, + headers=self.base._base_headers + ) + + async def followers_ids(self, user_id, screen_name, count, cursor): + return await self._friendship_ids(user_id, screen_name, count, Endpoint.FOLLOWERS_IDS, cursor) + + async def friends_ids(self, user_id, screen_name, count, cursor): + return await self._friendship_ids(user_id, screen_name, count, Endpoint.FRIENDS_IDS, cursor) + + async def dm_new(self, conversation_id, text, media_id, reply_to): + data = { + 'cards_platform': 'Web-12', + 'conversation_id': conversation_id, + 'dm_users': False, + 'include_cards': 1, + 'include_quote_count': True, + 'recipient_ids': False, + 'text': text + } + if media_id is not None: + data['media_id'] = media_id + if reply_to is not None: + data['reply_to_dm_id'] = reply_to + + return await self.base.post( + Endpoint.DM_NEW, + json=data, + headers=self.base._base_headers + ) + + async def dm_conversation(self, conversation_id, max_id): + 
params = {'context': 'FETCH_DM_CONVERSATION_HISTORY', 'include_conversation_info': True} + if max_id is not None: + params['max_id'] = max_id + + return await self.base.get( + Endpoint.DM_CONVERSATION.format(conversation_id), + params=params, + headers=self.base._base_headers + ) + + async def conversation_update_name(self, group_id, name): + data = {'name': name} + headers = self.base._base_headers + headers['content-type'] = 'application/x-www-form-urlencoded' + return await self.base.post( + Endpoint.CONVERSATION_UPDATE_NAME.format(group_id), + data=data, + headers=headers + ) + + async def _notifications(self, endpoint, count, cursor): + params = {'count': count} + if cursor is not None: + params['cursor'] = cursor + + return await self.base.get( + endpoint, + params=params, + headers=self.base._base_headers + ) + + async def notifications_all(self, count, cursor): + return await self._notifications(Endpoint.NOTIFICATIONS_ALL, count, cursor) + + async def notifications_verified(self, count, cursor): + return await self._notifications(Endpoint.NOTIFICATIONS_VERIFIED, count, cursor) + + async def notifications_mentions(self, count, cursor): + return await self._notifications(Endpoint.NOTIFICATIONS_MENTIONS, count, cursor) + + async def live_pipeline_update_subscriptions(self, session, subscribe, unsubscribe): + data = { + 'sub_topics': subscribe, + 'unsub_topics': unsubscribe + } + headers = self.base._base_headers + headers['content-type'] = 'application/x-www-form-urlencoded' + headers['LivePipeline-Session'] = session + return await self.base.post( + Endpoint.LIVE_PIPELINE_UPDATE_SUBSCRIPTIONS, data=data, headers=headers + ) + + async def user_state(self): + return await self.base.get( + Endpoint.USER_STATE, + headers=self.base._base_headers + ) diff --git a/build/lib/twikit/community.py b/build/lib/twikit/community.py new file mode 100644 index 00000000..ce556f46 --- /dev/null +++ b/build/lib/twikit/community.py @@ -0,0 +1,282 @@ +from __future__ import 
annotations + +from typing import TYPE_CHECKING, Literal, NamedTuple + +from .tweet import Tweet +from .user import User +from .utils import Result, b64_to_str + +if TYPE_CHECKING: + from .client.client import Client + + +class CommunityCreator(NamedTuple): + id: str + screen_name: str + verified: bool + + +class CommunityRule(NamedTuple): + id: str + name: str + + +class CommunityMember: + def __init__(self, client: Client, data: dict) -> None: + self._client = client + self.id: str = data['rest_id'] + + self.community_role: str = data['community_role'] + self.super_following: bool = data['super_following'] + self.super_follow_eligible: bool = data['super_follow_eligible'] + self.super_followed_by: bool = data['super_followed_by'] + self.smart_blocking: bool = data['smart_blocking'] + self.is_blue_verified: bool = data['is_blue_verified'] + + legacy = data['legacy'] + self.screen_name: str = legacy['screen_name'] + self.name: str = legacy['name'] + self.follow_request_sent: bool = legacy['follow_request_sent'] + self.protected: bool = legacy['protected'] + self.following: bool = legacy['following'] + self.followed_by: bool = legacy['followed_by'] + self.blocking: bool = legacy['blocking'] + self.profile_image_url_https: str = legacy['profile_image_url_https'] + self.verified: bool = legacy['verified'] + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, CommunityMember) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + def __repr__(self) -> str: + return f'' + + +class Community: + """ + Attributes + ---------- + id : :class:`str` + The ID of the community. + name : :class:`str` + The name of the community. + member_count : :class:`int` + The count of members in the community. + is_nsfw : :class:`bool` + Indicates if the community is NSFW. + members_facepile_results : list[:class:`str`] + The profile image URLs of members. 
+ banner : :class:`dict` + The banner information of the community. + is_member : :class:`bool` + Indicates if the user is a member of the community. + role : :class:`str` + The role of the user in the community. + description : :class:`str` + The description of the community. + creator : :class:`User` | :class:`CommunityCreator` + The creator of the community. + admin : :class:`User` + The admin of the community. + join_policy : :class:`str` + The join policy of the community. + created_at : :class:`int` + The timestamp of the community's creation. + invites_policy : :class:`str` + The invites policy of the community. + is_pinned : :class:`bool` + Indicates if the community is pinned. + rules : list[:class:`CommunityRule`] + The rules of the community. + """ + + def __init__(self, client: Client, data: dict) -> None: + self._client = client + self.id: str = data['rest_id'] + + self.name: str = data['name'] + self.member_count: int = data['member_count'] + self.is_nsfw: bool = data['is_nsfw'] + + self.members_facepile_results: list[str] = [ + i['result']['legacy']['profile_image_url_https'] + for i in data['members_facepile_results'] + ] + self.banner: dict = data['default_banner_media']['media_info'] + + self.is_member: bool = data.get('is_member') + self.role: str = data.get('role') + self.description: str = data.get('description') + + if 'creator_results' in data: + creator = data['creator_results']['result'] + if 'rest_id' in creator: + self.creator = User(client, creator) + else: + self.creator = CommunityCreator( + b64_to_str(creator['id']).removeprefix('User:'), + creator['legacy']['screen_name'], + creator['legacy']['verified'] + ) + else: + self.creator = None + + if 'admin_results' in data: + admin = data['admin_results']['result'] + self.admin = User(client, admin) + else: + self.admin = None + + self.join_policy: str = data.get('join_policy') + self.created_at: int = data.get('created_at') + self.invites_policy: str = data.get('invites_policy') + 
self.is_pinned: bool = data.get('is_pinned') + + if 'rules' in data: + self.rules: list = [ + CommunityRule(i['rest_id'], i['name']) for i in data['rules'] + ] + else: + self.rules = None + + async def get_tweets( + self, + tweet_type: Literal['Top', 'Latest', 'Media'], + count: int = 40, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves tweets from the community. + + Parameters + ---------- + tweet_type : {'Top', 'Latest', 'Media'} + The type of tweets to retrieve. + count : :class:`int`, default=40 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`Tweet`] + List of retrieved tweets. + + Examples + -------- + >>> tweets = await community.get_tweets('Latest') + >>> for tweet in tweets: + ... print(tweet) + + + ... + >>> more_tweets = await tweets.next() # Retrieve more tweets + """ + return await self._client.get_community_tweets( + self.id, + tweet_type, + count, + cursor + ) + + async def join(self) -> Community: + """ + Join the community. + """ + return await self._client.join_community(self.id) + + async def leave(self) -> Community: + """ + Leave the community. + """ + return await self._client.leave_community(self.id) + + async def request_to_join(self, answer: str | None = None) -> Community: + """ + Request to join the community. + """ + return await self._client.request_to_join_community(self.id, answer) + + async def get_members( + self, count: int = 20, cursor: str | None = None + ) -> Result[CommunityMember]: + """ + Retrieves members of the community. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of members to retrieve. + + Returns + ------- + Result[:class:`CommunityMember`] + List of retrieved members. + """ + return await self._client.get_community_members( + self.id, + count, + cursor + ) + + async def get_moderators( + self, count: int = 20, cursor: str | None = None + ) -> Result[CommunityMember]: + """ + Retrieves moderators of the community. 
+ + Parameters + ---------- + count : :class:`int`, default=20 + The number of moderators to retrieve. + + Returns + ------- + Result[:class:`CommunityMember`] + List of retrieved moderators. + """ + return await self._client.get_community_moderators( + self.id, + count, + cursor + ) + + async def search_tweet( + self, + query: str, + count: int = 20, + cursor: str | None = None + )-> Result[Tweet]: + """Searchs tweets in the community. + + Parameters + ---------- + query : :class:`str` + The search query. + count : :class:`int`, default=20 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`Tweet`] + List of retrieved tweets. + """ + return await self._client.search_community_tweet( + self.id, + query, + count, + cursor + ) + + async def update(self) -> None: + new = await self._client.get_community(self.id) + self.__dict__.update(new.__dict__) + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, Community) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + def __repr__(self) -> str: + return f'' diff --git a/build/lib/twikit/constants.py b/build/lib/twikit/constants.py new file mode 100644 index 00000000..3d7dbb28 --- /dev/null +++ b/build/lib/twikit/constants.py @@ -0,0 +1,260 @@ +# This token is common to all accounts and does not need to be changed. 
+TOKEN = 'AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA' + +DOMAIN = 'x.com' + +FEATURES = { + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'tweetypie_unmention_optimization_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 'tweet_awards_web_tipping_enabled': False, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'rweb_video_timestamps_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'responsive_web_media_download_video_enabled': False, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} + +USER_FEATURES = { + 'hidden_profile_likes_enabled': True, + 'hidden_profile_subscriptions_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'subscriptions_verification_info_is_identity_verified_enabled': True, + 'subscriptions_verification_info_verified_since_enabled': True, + 'highlights_tweets_tab_ui_enabled': True, + 'responsive_web_twitter_article_notes_tab_enabled': False, + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'responsive_web_graphql_timeline_navigation_enabled': True +} + +LIST_FEATURES = { + 
'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'responsive_web_graphql_timeline_navigation_enabled': True +} + +COMMUNITY_NOTE_FEATURES = { + 'responsive_web_birdwatch_media_notes_enabled': True, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'rweb_tipjar_consumption_enabled': False, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False +} + +COMMUNITY_TWEETS_FEATURES = { + 'rweb_tipjar_consumption_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'communities_web_enable_tweet_community_results_fetch': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'tweetypie_unmention_optimization_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 'tweet_awards_web_tipping_enabled': False, + 'creator_subscriptions_quote_tweet_preview_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'rweb_video_timestamps_enabled': True, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} + +JOIN_COMMUNITY_FEATURES = { + 'rweb_tipjar_consumption_enabled': True, + 
'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'responsive_web_graphql_timeline_navigation_enabled': True +} + +NOTE_TWEET_FEATURES = { + 'communities_web_enable_tweet_community_results_fetch': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'tweetypie_unmention_optimization_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 'tweet_awards_web_tipping_enabled': False, + 'creator_subscriptions_quote_tweet_preview_enabled': False, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'articles_preview_enabled': False, + 'rweb_video_timestamps_enabled': True, + 'rweb_tipjar_consumption_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'tweet_with_visibility_results_prefer_gql_media_interstitial_enabled': True, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} + +SIMILAR_POSTS_FEATURES = { + 'rweb_tipjar_consumption_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 
'communities_web_enable_tweet_community_results_fetch': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'articles_preview_enabled': False, + 'tweetypie_unmention_optimization_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 'tweet_awards_web_tipping_enabled': False, + 'creator_subscriptions_quote_tweet_preview_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'tweet_with_visibility_results_prefer_gql_media_interstitial_enabled': True, + 'rweb_video_timestamps_enabled': True, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} + +BOOKMARK_FOLDER_TIMELINE_FEATURES = { + 'rweb_tipjar_consumption_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'communities_web_enable_tweet_community_results_fetch': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'articles_preview_enabled': False, + 'tweetypie_unmention_optimization_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 'tweet_awards_web_tipping_enabled': False, + 
'creator_subscriptions_quote_tweet_preview_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'tweet_with_visibility_results_prefer_gql_media_interstitial_enabled': True, + 'rweb_video_timestamps_enabled': True, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} + +TWEET_RESULT_BY_REST_ID_FEATURES = { + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'communities_web_enable_tweet_community_results_fetch': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'articles_preview_enabled': True, + 'tweetypie_unmention_optimization_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 'tweet_awards_web_tipping_enabled': False, + 'creator_subscriptions_quote_tweet_preview_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'rweb_video_timestamps_enabled': True, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'rweb_tipjar_consumption_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} + +USER_HIGHLIGHTS_TWEETS_FEATURES = { + 'rweb_tipjar_consumption_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': 
True, + 'verified_phone_label_enabled': False, + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'communities_web_enable_tweet_community_results_fetch': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'articles_preview_enabled': True, + 'tweetypie_unmention_optimization_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 'tweet_awards_web_tipping_enabled': False, + 'creator_subscriptions_quote_tweet_preview_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'rweb_video_timestamps_enabled': True, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} + +TWEET_RESULTS_BY_REST_IDS_FEATURES = { + 'creator_subscriptions_tweet_preview_api_enabled': True, + 'premium_content_api_read_enabled': False, + 'communities_web_enable_tweet_community_results_fetch': True, + 'c9s_tweet_anatomy_moderator_badge_enabled': True, + 'responsive_web_grok_analyze_button_fetch_trends_enabled': False, + 'responsive_web_grok_analyze_post_followups_enabled': True, + 'responsive_web_grok_share_attachment_enabled': True, + 'articles_preview_enabled': True, + 'responsive_web_edit_tweet_api_enabled': True, + 'graphql_is_translatable_rweb_tweet_is_translatable_enabled': True, + 'view_counts_everywhere_api_enabled': True, + 'longform_notetweets_consumption_enabled': True, + 'responsive_web_twitter_article_tweet_consumption_enabled': True, + 
'tweet_awards_web_tipping_enabled': False, + 'creator_subscriptions_quote_tweet_preview_enabled': False, + 'freedom_of_speech_not_reach_fetch_enabled': True, + 'standardized_nudges_misinfo': True, + 'tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled': True, + 'rweb_video_timestamps_enabled': True, + 'longform_notetweets_rich_text_read_enabled': True, + 'longform_notetweets_inline_media_enabled': True, + 'profile_label_improvements_pcf_label_in_post_enabled': False, + 'rweb_tipjar_consumption_enabled': True, + 'responsive_web_graphql_exclude_directive_enabled': True, + 'verified_phone_label_enabled': False, + 'responsive_web_graphql_skip_user_profile_image_extensions_enabled': False, + 'responsive_web_graphql_timeline_navigation_enabled': True, + 'responsive_web_enhance_cards_enabled': False +} diff --git a/build/lib/twikit/errors.py b/build/lib/twikit/errors.py new file mode 100644 index 00000000..6518e9e3 --- /dev/null +++ b/build/lib/twikit/errors.py @@ -0,0 +1,110 @@ +from __future__ import annotations + + +class TwitterException(Exception): + """ + Base class for Twitter API related exceptions. + """ + def __init__(self, *args: object, headers: dict | None = None) -> None: + super().__init__(*args) + if headers is None: + self.headers = None + else: + self.headers = dict(headers) + +class BadRequest(TwitterException): + """ + Exception raised for 400 Bad Request errors. + """ + +class Unauthorized(TwitterException): + """ + Exception raised for 401 Unauthorized errors. + """ + +class Forbidden(TwitterException): + """ + Exception raised for 403 Forbidden errors. + """ + +class NotFound(TwitterException): + """ + Exception raised for 404 Not Found errors. + """ + +class RequestTimeout(TwitterException): + """ + Exception raised for 408 Request Timeout errors. + """ + +class TooManyRequests(TwitterException): + """ + Exception raised for 429 Too Many Requests errors. 
+ """ + def __init__(self, *args, headers: dict | None = None) -> None: + super().__init__(*args, headers=headers) + if headers is not None and 'x-rate-limit-reset' in headers: + self.rate_limit_reset = int(headers.get('x-rate-limit-reset')) + else: + self.rate_limit_reset = None + +class ServerError(TwitterException): + """ + Exception raised for 5xx Server Error responses. + """ + +class CouldNotTweet(TwitterException): + """ + Exception raised when a tweet could not be sent. + """ + +class DuplicateTweet(CouldNotTweet): + """ + Exception raised when a tweet is a duplicate of another. + """ + +class TweetNotAvailable(TwitterException): + """ + Exceptions raised when a tweet is not available. + """ + +class InvalidMedia(TwitterException): + """ + Exception raised when there is a problem with the media ID + sent with the tweet. + """ + +class UserNotFound(TwitterException): + """ + Exception raised when a user does not exsit. + """ + +class UserUnavailable(TwitterException): + """ + Exception raised when a user is unavailable. + """ + +class AccountSuspended(TwitterException): + """ + Exception raised when the account is suspended. + """ + +class AccountLocked(TwitterException): + """ + Exception raised when the account is locked (very likey is Arkose challenge). 
+ """ + +ERROR_CODE_TO_EXCEPTION: dict[int, TwitterException] = { + 187: DuplicateTweet, + 324: InvalidMedia +} + + +def raise_exceptions_from_response(errors: list[dict]): + for error in errors: + code = error.get('code') + if code not in ERROR_CODE_TO_EXCEPTION: + code = error.get('extensions', {}).get('code') + exception = ERROR_CODE_TO_EXCEPTION.get(code) + if exception is not None: + raise exception(error['message']) diff --git a/build/lib/twikit/geo.py b/build/lib/twikit/geo.py new file mode 100644 index 00000000..50cd8b77 --- /dev/null +++ b/build/lib/twikit/geo.py @@ -0,0 +1,82 @@ +from __future__ import annotations + +import warnings +from typing import TYPE_CHECKING + +from .errors import TwitterException + +if TYPE_CHECKING: + from .client.client import Client + + +class Place: + """ + Attributes + ---------- + id : :class:`str` + The ID of the place. + name : :class:`str` + The name of the place. + full_name : :class:`str` + The full name of the place. + country : :class:`str` + The country where the place is located. + country_code : :class:`str` + The ISO 3166-1 alpha-2 country code of the place. + url : :class:`str` + The URL providing more information about the place. + place_type : :class:`str` + The type of place. + attributes : :class:`dict` + bounding_box : :class:`dict` + The bounding box that defines the geographical area of the place. + centroid : list[:class:`float`] | None + The geographical center of the place, represented by latitude and + longitude. + contained_within : list[:class:`.Place`] + A list of places that contain this place. 
+ """ + + def __init__(self, client: Client, data: dict) -> None: + self._client = client + + self.id: str = data['id'] + self.name: str = data['name'] + self.full_name: str = data['full_name'] + self.country: str = data['country'] + self.country_code: str = data['country_code'] + self.url: str = data['url'] + self.place_type: str = data['place_type'] + self.attributes: dict | None = data.get('attributes') + self.bounding_box: dict = data['bounding_box'] + self.centroid: list[float] | None = data.get('centroid') + + self.contained_within: list[Place] = [ + Place(client, place) for place in data.get('contained_within', []) + ] + + async def update(self) -> None: + new = self._client.get_place(self.id) + await self.__dict__.update(new.__dict__) + + def __repr__(self) -> str: + return f'' + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, Place) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + +def _places_from_response(client: Client, response: dict) -> list[Place]: + if 'errors' in response: + e = response['errors'][0] + # No data available for the given coordinate. + if e['code'] == 6: + warnings.warn(e['message']) + else: + raise TwitterException(e['message']) + + places = response['result']['places'] if 'result' in response else [] + return [Place(client, place) for place in places] diff --git a/build/lib/twikit/group.py b/build/lib/twikit/group.py new file mode 100644 index 00000000..775d6935 --- /dev/null +++ b/build/lib/twikit/group.py @@ -0,0 +1,259 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from .message import Message +from .user import User +from .utils import build_user_data + +if TYPE_CHECKING: + from httpx import Response + + from .client.client import Client + from .utils import Result + + +class Group: + """ + Represents a group. + + Attributes + ---------- + id : :class:`str` + The ID of the group. 
    name : :class:`str` | None
        The name of the group.
    members : list[:class:`.User`]
        The members of the group.
    """
    def __init__(self, client: Client, group_id: str, data: dict) -> None:
        self._client = client
        self.id = group_id

        conversation_timeline = data["conversation_timeline"]
        # NOTE(review): this indexes ['name'] directly and only guards against
        # an empty conversations mapping — assumes group conversations always
        # carry a 'name' key; confirm against the API payload.
        self.name: str | None = (
            conversation_timeline["conversations"][group_id]["name"]
            if len(conversation_timeline["conversations"].keys()) > 0
            else None
        )

        # Every user referenced by the conversation becomes a member object.
        members = conversation_timeline["users"].values()
        self.members: list[User] = [User(client, build_user_data(i)) for i in members]

    async def get_history(
        self, max_id: str | None = None
    ) -> Result[GroupMessage]:
        """
        Retrieves the DM conversation history in the group.

        Parameters
        ----------
        max_id : :class:`str`, default=None
            If specified, retrieves messages older than the specified max_id.

        Returns
        -------
        Result[:class:`GroupMessage`]
            A Result object containing a list of GroupMessage objects
            representing the DM conversation history.

        Examples
        --------
        >>> messages = await group.get_history()
        >>> for message in messages:
        >>>     print(message)
        <GroupMessage id="...">
        <GroupMessage id="...">
        ...
        ...
        >>> more_messages = await messages.next()  # Retrieve more messages
        >>> for message in more_messages:
        >>>     print(message)
        <GroupMessage id="...">
        <GroupMessage id="...">
        ...
        ...
        """
        return await self._client.get_group_dm_history(self.id, max_id)

    async def add_members(self, user_ids: list[str]) -> Response:
        """Adds members to the group.

        Parameters
        ----------
        user_ids : list[:class:`str`]
            List of IDs of users to be added.

        Returns
        -------
        :class:`httpx.Response`
            Response returned from twitter api.

        Examples
        --------
        >>> members = ['...']
        >>> await group.add_members(members)
        """
        return await self._client.add_members_to_group(self.id, user_ids)

    async def change_name(self, name: str) -> Response:
        """Changes group name

        Parameters
        ----------
        name : :class:`str`
            New name.

        Returns
        -------
        :class:`httpx.Response`
            Response returned from twitter api.
        """
        return await self._client.change_group_name(self.id, name)

    async def send_message(
        self,
        text: str,
        media_id: str | None = None,
        reply_to: str | None = None
    ) -> GroupMessage:
        """
        Sends a message to the group.

        Parameters
        ----------
        text : :class:`str`
            The text content of the direct message.
        media_id : :class:`str`, default=None
            The media ID associated with any media content
            to be included in the message.
            Media ID can be received by using the :func:`.upload_media` method.
        reply_to : :class:`str`, default=None
            Message ID to reply to.

        Returns
        -------
        :class:`GroupMessage`
            `GroupMessage` object containing information about the message sent.

        Examples
        --------
        >>> # send DM with media
        >>> group_id = '000000000'
        >>> media_id = await client.upload_media('image.png')
        >>> message = await group.send_message('text', media_id)
        >>> print(message)
        <GroupMessage id="...">
        """
        return await self._client.send_dm_to_group(
            self.id, text, media_id, reply_to
        )

    async def update(self) -> None:
        # Re-fetch the group and refresh this instance's attributes in place.
        new = await self._client.get_group(self.id)
        self.__dict__.update(new.__dict__)

    def __repr__(self) -> str:
        # NOTE(review): the repr format string appears to have been stripped
        # in transit (empty f-string) — confirm the intended text
        # (presumably something like '<Group id="...">').
        return f''


class GroupMessage(Message):
    """
    Represents a direct message.

    Attributes
    ----------
    id : :class:`str`
        The ID of the message.
    time : :class:`str`
        The timestamp of the message.
    text : :class:`str`
        The text content of the message.
    attachment : :class:`str`
        The media URL associated with any attachment in the message.
    group_id : :class:`str`
        The ID of the group.
    """
    def __init__(
        self,
        client: Client,
        data: dict,
        sender_id: str,
        group_id: str
    ) -> None:
        # Group DMs have no single recipient, so recipient_id is None.
        super().__init__(client, data, sender_id, None)
        self.group_id = group_id

    async def group(self) -> Group:
        """
        Gets the group to which the message was sent.
+ """ + return await self._client.get_group(self.group_id) + + async def reply( + self, text: str, media_id: str | None = None + ) -> GroupMessage: + """Replies to the message. + + Parameters + ---------- + text : :class:`str` + The text content of the direct message. + media_id : :class:`str`, default=None + The media ID associated with any media content + to be included in the message. + Media ID can be received by using the :func:`.upload_media` method. + + Returns + ------- + :class:`Message` + `GroupMessage` object containing information about + the message sent. + + See Also + -------- + Client.send_dm_to_group + """ + return await self._client.send_dm_to_group( + self.group_id, text, media_id, self.id + ) + + async def add_reaction(self, emoji: str) -> Response: + """ + Adds a reaction to the message. + + Parameters + ---------- + emoji : :class:`str` + The emoji to be added as a reaction. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + """ + return await self._client.add_reaction_to_message( + self.id, self.group_id, emoji + ) + + async def remove_reaction(self, emoji: str) -> Response: + """ + Removes a reaction from the message. + + Parameters + ---------- + emoji : :class:`str` + The emoji to be removed. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
+ """ + return await self._client.remove_reaction_from_message( + self.id, self.group_id, emoji + ) + + def __repr__(self) -> str: + return f'' \ No newline at end of file diff --git a/build/lib/twikit/guest/__init__.py b/build/lib/twikit/guest/__init__.py new file mode 100644 index 00000000..9f982878 --- /dev/null +++ b/build/lib/twikit/guest/__init__.py @@ -0,0 +1,3 @@ +from .client import GuestClient +from .tweet import Tweet +from .user import User diff --git a/build/lib/twikit/guest/client.py b/build/lib/twikit/guest/client.py new file mode 100644 index 00000000..e5ddd7a9 --- /dev/null +++ b/build/lib/twikit/guest/client.py @@ -0,0 +1,393 @@ +from __future__ import annotations + +import json +import warnings +from functools import partial +from typing import Any, Literal + +from httpx import AsyncClient, AsyncHTTPTransport, Response +from httpx._utils import URLPattern + +from ..client.gql import GQLClient +from ..client.v11 import V11Client +from ..constants import TOKEN +from ..errors import ( + BadRequest, + Forbidden, + NotFound, + RequestTimeout, + ServerError, + TooManyRequests, + TwitterException, + Unauthorized +) +from ..utils import Result, find_dict, find_entry_by_type, httpx_transport_to_url +from .tweet import Tweet +from .user import User + + +def tweet_from_data(client: GuestClient, data: dict) -> Tweet: + ':meta private:' + tweet_data_ = find_dict(data, 'result', True) + if not tweet_data_: + return None + tweet_data = tweet_data_[0] + + if tweet_data.get('__typename') == 'TweetTombstone': + return None + if 'tweet' in tweet_data: + tweet_data = tweet_data['tweet'] + if 'core' not in tweet_data: + return None + if 'result' not in tweet_data['core']['user_results']: + return None + if 'legacy' not in tweet_data: + return None + + user_data = tweet_data['core']['user_results']['result'] + return Tweet(client, tweet_data, User(client, user_data)) + + + +class GuestClient: + """ + A client for interacting with the Twitter API as a guest. 
+ This class is used for interacting with the Twitter API + without requiring authentication. + + Parameters + ---------- + language : :class:`str` | None, default=None + The language code to use in API requests. + proxy : :class:`str` | None, default=None + The proxy server URL to use for request + (e.g., 'http://0.0.0.0:0000'). + + Examples + -------- + >>> client = GuestClient() + >>> await client.activate() # Activate the client by generating a guest token. + """ + + def __init__( + self, + language: str | None = None, + proxy: str | None = None, + **kwargs + ) -> None: + if 'proxies' in kwargs: + message = ( + "The 'proxies' argument is now deprecated. Use 'proxy' " + "instead. https://github.com/encode/httpx/pull/2879" + ) + warnings.warn(message) + + self.http = AsyncClient(proxy=proxy, **kwargs) + self.language = language + self.proxy = proxy + + self._token = TOKEN + self._user_agent = ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' + 'AppleWebKit/537.36 (KHTML, like Gecko) ' + 'Chrome/122.0.0.0 Safari/537.36') + self._guest_token: str | None = None # set when activate method is called + self.gql = GQLClient(self) + self.v11 = V11Client(self) + + async def request( + self, + method: str, + url: str, + raise_exception: bool = True, + **kwargs + ) -> tuple[dict | Any, Response]: + ':meta private:' + response = await self.http.request(method, url, **kwargs) + + try: + response_data = response.json() + except json.decoder.JSONDecodeError: + response_data = response.text + + status_code = response.status_code + + if status_code >= 400 and raise_exception: + message = f'status: {status_code}, message: "{response.text}"' + if status_code == 400: + raise BadRequest(message, headers=response.headers) + elif status_code == 401: + raise Unauthorized(message, headers=response.headers) + elif status_code == 403: + raise Forbidden(message, headers=response.headers) + elif status_code == 404: + raise NotFound(message, headers=response.headers) + elif status_code == 408: + 
raise RequestTimeout(message, headers=response.headers) + elif status_code == 429: + raise TooManyRequests(message, headers=response.headers) + elif 500 <= status_code < 600: + raise ServerError(message, headers=response.headers) + else: + raise TwitterException(message, headers=response.headers) + + return response_data, response + + async def get(self, url, **kwargs) -> tuple[dict | Any, Response]: + ':meta private:' + return await self.request('GET', url, **kwargs) + + async def post(self, url, **kwargs) -> tuple[dict | Any, Response]: + ':meta private:' + return await self.request('POST', url, **kwargs) + + @property + def proxy(self) -> str: + ':meta private:' + transport: AsyncHTTPTransport = self.http._mounts.get( + URLPattern('all://') + ) + if transport is None: + return None + if not hasattr(transport._pool, '_proxy_url'): + return None + return httpx_transport_to_url(transport) + + @proxy.setter + def proxy(self, url: str) -> None: + self.http._mounts = { + URLPattern('all://'): AsyncHTTPTransport(proxy=url) + } + + @property + def _base_headers(self) -> dict[str, str]: + """ + Base headers for Twitter API requests. + """ + headers = { + 'authorization': f'Bearer {self._token}', + 'content-type': 'application/json', + 'X-Twitter-Active-User': 'yes', + 'Referer': 'https://twitter.com/', + } + + if self.language is not None: + headers['Accept-Language'] = self.language + headers['X-Twitter-Client-Language'] = self.language + + if self._guest_token is not None: + headers['X-Guest-Token'] = self._guest_token + + return headers + + async def activate(self) -> str: + """ + Activate the client by generating a guest token. + """ + response, _ = await self.v11.guest_activate() + self._guest_token = response['guest_token'] + return self._guest_token + + async def get_user_by_screen_name(self, screen_name: str) -> User: + """ + Retrieves a user object based on the provided screen name. 
+ + Parameters + ---------- + screen_name : :class:`str` + The screen name of the user to retrieve. + + Returns + ------- + :class:`.user.User` + An instance of the `User` class containing user details. + + Examples + -------- + >>> user = await client.get_user_by_screen_name('example_user') + >>> print(user) + + """ + response, _ = await self.gql.user_by_screen_name(screen_name) + return User(self, response['data']['user']['result']) + + async def get_user_by_id(self, user_id: str) -> User: + """ + Retrieves a user object based on the provided user ID. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to retrieve. + + Returns + ------- + :class:`.user.User` + An instance of the `User` class + + Examples + -------- + >>> user = await client.get_user_by_id('123456789') + >>> print(user) + + """ + response, _ = await self.gql.user_by_rest_id(user_id) + return User(self, response['data']['user']['result']) + + async def get_user_tweets( + self, + user_id: str, + tweet_type: Literal['Tweets'] = 'Tweets', + count: int = 40, + ) -> list[Tweet]: + """ + Fetches tweets from a specific user's timeline. + + Parameters + ---------- + user_id : :class:`str` + The ID of the Twitter user whose tweets to retrieve. + To get the user id from the screen name, you can use + `get_user_by_screen_name` method. + tweet_type : {'Tweets'}, default='Tweets' + The type of tweets to retrieve. + count : :class:`int`, default=40 + The number of tweets to retrieve. + + Returns + ------- + list[:class:`.tweet.Tweet`] + A Result object containing a list of `Tweet` objects. + + Examples + -------- + >>> user_id = '...' + + If you only have the screen name, you can get the user id as follows: + + >>> screen_name = 'example_user' + >>> user = client.get_user_by_screen_name(screen_name) + >>> user_id = user.id + + >>> tweets = await client.get_user_tweets(user_id) + >>> for tweet in tweets: + ... print(tweet) + + + ... + ... 
+ + See Also + -------- + .get_user_by_screen_name + """ + tweet_type = tweet_type.capitalize() + f = { + 'Tweets': self.gql.user_tweets, + }[tweet_type] + response, _ = await f(user_id, count, None) + instructions_ = find_dict(response, 'instructions', True) + if not instructions_: + return [] + instructions = instructions_[0] + items = find_entry_by_type(instructions, 'TimelineAddEntries')['entries'] + results = [] + + for item in items: + entry_id = item['entryId'] + if not entry_id.startswith(('tweet', 'profile-conversation', 'profile-grid')): + continue + tweet = tweet_from_data(self, item) + if tweet is None: + continue + results.append(tweet) + + return results + + async def get_tweet_by_id(self, tweet_id: str) -> Tweet: + """ + Fetches a tweet by tweet ID. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet. + + Returns + ------- + :class:`.tweet.Tweet` + Tweet object + + Examples + -------- + >>> await client.get_tweet_by_id('123456789') + + """ + response, _ = await self.gql.tweet_result_by_rest_id(tweet_id) + return tweet_from_data(self, response) + + async def get_user_highlights_tweets( + self, + user_id: str, + count: int = 20, + cursor: str | None = None + ) -> Result[Tweet]: + """ + Retrieves highlighted tweets from a user's timeline. + + Parameters + ---------- + user_id : :class:`str` + The user ID + count : :class:`int`, default=20 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`.tweet.Tweet`] + An instance of the `Result` class containing the highlighted tweets. + + Examples + -------- + >>> result = await client.get_user_highlights_tweets('123456789') + >>> for tweet in result: + ... print(tweet) + + + ... + ... + + >>> more_results = await result.next() # Retrieve more highlighted tweets + >>> for tweet in more_results: + ... print(tweet) + + + ... + ... 
+ """ + response, _ = await self.gql.user_highlights_tweets(user_id, count, cursor) + + instructions = response['data']['user']['result']['timeline']['timeline']['instructions'] + instruction = find_entry_by_type(instructions, 'TimelineAddEntries') + if instruction is None: + return Result.empty() + entries = instruction['entries'] + previous_cursor = None + next_cursor = None + results = [] + + for entry in entries: + entryId = entry['entryId'] + if entryId.startswith('tweet'): + results.append(tweet_from_data(self, entry)) + elif entryId.startswith('cursor-top'): + previous_cursor = entry['content']['value'] + elif entryId.startswith('cursor-bottom'): + next_cursor = entry['content']['value'] + + return Result( + results, + partial(self.get_user_highlights_tweets, user_id, count, next_cursor), + next_cursor, + partial(self.get_user_highlights_tweets, user_id, count, previous_cursor), + previous_cursor + ) diff --git a/build/lib/twikit/guest/tweet.py b/build/lib/twikit/guest/tweet.py new file mode 100644 index 00000000..d3cf7ce5 --- /dev/null +++ b/build/lib/twikit/guest/tweet.py @@ -0,0 +1,225 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from ..utils import find_dict +from .user import User + +if TYPE_CHECKING: + from .client import GuestClient + + +class Tweet: + """ + Attributes + ---------- + id : :class:`str` + The unique identifier of the tweet. + created_at : :class:`str` + The date and time when the tweet was created. + created_at_datetime : :class:`datetime` + The created_at converted to datetime. + user: :class:`.guest.user.User` + Author of the tweet. + text : :class:`str` + The full text of the tweet. + lang : :class:`str` + The language of the tweet. + in_reply_to : :class:`str` + The tweet ID this tweet is in reply to, if any + is_quote_status : :class:`bool` + Indicates if the tweet is a quote status. 
+ quote : :class:`.guest.tweet.Tweet` | None + The Tweet being quoted (if any) + retweeted_tweet : :class:`.guest.tweet.Tweet` | None + The Tweet being retweeted (if any) + possibly_sensitive : :class:`bool` + Indicates if the tweet content may be sensitive. + possibly_sensitive_editable : :class:`bool` + Indicates if the tweet's sensitivity can be edited. + quote_count : :class:`int` + The count of quotes for the tweet. + media : :class:`list` + A list of media entities associated with the tweet. + reply_count : :class:`int` + The count of replies to the tweet. + favorite_count : :class:`int` + The count of favorites or likes for the tweet. + favorited : :class:`bool` + Indicates if the tweet is favorited. + view_count: :class:`int` | None + The count of views. + view_count_state : :class:`str` | None + The state of the tweet views. + retweet_count : :class:`int` + The count of retweets for the tweet. + place : :class:`.Place` | None + The location associated with the tweet. + editable_until_msecs : :class:`int` + The timestamp until which the tweet is editable. + is_translatable : :class:`bool` + Indicates if the tweet is translatable. + is_edit_eligible : :class:`bool` + Indicates if the tweet is eligible for editing. + edits_remaining : :class:`int` + The remaining number of edits allowed for the tweet. + reply_to: list[:class:`Tweet`] | None + A list of Tweet objects representing the tweets to which to reply. + related_tweets : list[:class:`Tweet`] | None + Related tweets. + hashtags: list[:class:`str`] + Hashtags included in the tweet text. + has_card : :class:`bool` + Indicates if the tweet contains a card. + thumbnail_title : :class:`str` | None + The title of the webpage displayed inside tweet's card. + thumbnail_url : :class:`str` | None + Link to the image displayed in the tweet's card. + urls : :class:`list` + Information about URLs contained in the tweet. + full_text : :class:`str` | None + The full text of the tweet. 
    bookmark_count : :class:`int` | None
        The number of bookmarks of the tweet.
    """

    def __init__(self, client: GuestClient, data: dict, user: User | None = None) -> None:
        # Keep the raw API payload around; some attributes are derived lazily from it.
        self._client = client
        self._data = data
        self.user = user

        # Filled in later by the client when thread/reply context is resolved.
        self.reply_to: list[Tweet] | None = None
        self.related_tweets: list[Tweet] | None = None
        self.thread: list[Tweet] | None = None

        self.id: str = data['rest_id']
        legacy = data['legacy']
        self.created_at: str = legacy['created_at']
        self.text: str = legacy['full_text']
        self.lang: str = legacy['lang']
        self.is_quote_status: bool = legacy['is_quote_status']
        self.in_reply_to: str | None = self._data['legacy'].get('in_reply_to_status_id_str')
        # NOTE(review): duplicated assignment below — identical to the one two lines
        # up; harmless, kept byte-for-byte.
        self.is_quote_status: bool = legacy['is_quote_status']
        # .get(...) may yield None when the key is absent, hence the | None hints.
        self.possibly_sensitive: bool | None = legacy.get('possibly_sensitive')
        self.possibly_sensitive_editable: bool | None = legacy.get('possibly_sensitive_editable')
        self.quote_count: int = legacy['quote_count']
        self.media: list | None = legacy['entities'].get('media')
        self.reply_count: int = legacy['reply_count']
        self.favorite_count: int = legacy['favorite_count']
        self.favorited: bool = legacy['favorited']
        self.retweet_count: int = legacy['retweet_count']
        self._place_data = legacy.get('place')
        self.editable_until_msecs: int | None = data['edit_control'].get('editable_until_msecs')
        self.is_translatable: bool | None = data.get('is_translatable')
        self.is_edit_eligible: bool | None = data['edit_control'].get('is_edit_eligible')
        self.edits_remaining: int | None = data['edit_control'].get('edits_remaining')
        # 'views' is absent on some payloads; counts arrive as strings when present.
        self.view_count: str | None = data['views'].get('count') if 'views' in data else None
        self.view_count_state: str | None = data['views'].get('state') if 'views' in data else None
        self.has_community_notes: bool | None = data.get('has_birdwatch_notes')

        # Get bookmark count from public_metrics if available, otherwise from legacy
        public_metrics = data.get('public_metrics', {})
        self.bookmark_count: int | None = (
            public_metrics.get('bookmark_count')
            if public_metrics.get('bookmark_count') is not None
            else legacy.get('bookmark_count')
        )

        # Quoted tweet, unless the quoted status was deleted (tombstone).
        # NOTE(review): pop() mutates the stored payload — presumably deliberate,
        # to avoid re-parsing; confirm before reordering these branches.
        if data.get('quoted_status_result'):
            quoted_tweet = data.pop('quoted_status_result')['result']
            if 'tweet' in quoted_tweet:
                quoted_tweet = quoted_tweet['tweet']
            if quoted_tweet.get('__typename') != 'TweetTombstone':
                quoted_user = User(client, quoted_tweet['core']['user_results']['result'])
                self.quote: Tweet = Tweet(client, quoted_tweet, quoted_user)
        else:
            self.quote = None

        # Original tweet when this one is a retweet.
        if legacy.get('retweeted_status_result'):
            retweeted_tweet = legacy.pop('retweeted_status_result')['result']
            if 'tweet' in retweeted_tweet:
                retweeted_tweet = retweeted_tweet['tweet']
            retweeted_user = User(
                client, retweeted_tweet['core']['user_results']['result']
            )
            self.retweeted_tweet: Tweet = Tweet(
                client, retweeted_tweet, retweeted_user
            )
        else:
            self.retweeted_tweet = None

        # Long-form ("note") tweets carry the full text and entities separately.
        note_tweet_results = find_dict(data, 'note_tweet_results', find_one=True)
        self.full_text: str = self.text
        if note_tweet_results:
            text_list = find_dict(note_tweet_results, 'text', find_one=True)
            if text_list:
                self.full_text = text_list[0]

            entity_set = note_tweet_results[0]['result']['entity_set']
            self.urls: list | None = entity_set.get('urls')
            hashtags = entity_set.get('hashtags', [])
        else:
            self.urls: list | None = legacy['entities'].get('urls')
            hashtags = legacy['entities'].get('hashtags', [])

        self.hashtags: list[str] = [
            i['text'] for i in hashtags
        ]

        # Community note (Birdwatch), if one is attached to the tweet.
        self.community_note = None
        if 'birdwatch_pivot' in data:
            community_note_data = data['birdwatch_pivot']
            if 'note' in community_note_data:
                self.community_note = {
                    'id': community_note_data['note']['rest_id'],
                    'text': community_note_data['subtitle']['text']
                }

        # Poll cards are stored raw; parsed on demand elsewhere.
        if (
            'card' in data and
            'legacy' in data['card'] and
            'name' in data['card']['legacy'] and
            data['card']['legacy']['name'].startswith('poll')
        ):
            self._poll_data = data['card']
        else:
            self._poll_data = None

        # Link-preview card: extract title and thumbnail URL from binding_values.
        self.thumbnail_url = None
        self.thumbnail_title = None
        self.has_card = 'card' in data
        if (
            'card' in data and
            'legacy' in data['card'] and
            'binding_values' in data['card']['legacy']
        ):
            card_data = data['card']['legacy']['binding_values']

            if isinstance(card_data, list):
                # binding_values is a list of {key, value} pairs; index it by key.
                binding_values = {
                    i.get('key'): i.get('value')
                    for i in card_data
                }

                if 'title' in binding_values and 'string_value' in binding_values['title']:
                    self.thumbnail_title = binding_values['title']['string_value']

                if (
                    'thumbnail_image_original' in binding_values and
                    'image_value' in binding_values['thumbnail_image_original'] and
                    'url' in binding_values['thumbnail_image_original']['image_value']
                ):
                    self.thumbnail_url = binding_values['thumbnail_image_original']['image_value']['url']

    async def update(self) -> None:
        # Re-fetch this tweet and copy the fresh attributes onto this instance.
        new = await self._client.get_tweet_by_id(self.id)
        self.__dict__.update(new.__dict__)

    def __repr__(self) -> str:
        # NOTE(review): the format string looks truncated by text extraction
        # (angle-bracketed content stripped); kept byte-identical.
        return f''

    def __eq__(self, __value: object) -> bool:
        # Tweets compare equal by ID only.
        return isinstance(__value, Tweet) and self.id == __value.id

    def __ne__(self, __value: object) -> bool:
        return not self == __value
class User:
    """
    Read-only snapshot of an X/Twitter user profile as seen by a guest
    (unauthenticated) client.

    Attributes
    ----------
    id : :class:`str`
        Unique identifier of the user (``rest_id``).
    created_at : :class:`str`
        Account creation time string, as returned by the API.
    name : :class:`str`
        Display name.
    screen_name : :class:`str`
        Handle (without the leading ``@``).
    profile_image_url : :class:`str`
        HTTPS URL of the avatar image.
    profile_banner_url : :class:`str`
        Banner image URL, if the user set one.
    url : :class:`str`
        Profile website URL, if any.
    location : :class:`str`
        Free-form location string.
    description : :class:`str`
        Profile bio.
    description_urls : :class:`list`
        URL entities found inside the bio.
    urls : :class:`list`
        URL entities attached to the profile website field.
    pinned_tweet_ids : :class:`str`
        IDs of tweets pinned to the profile.
    is_blue_verified : :class:`bool`
        Whether the account has a blue checkmark.
    verified : :class:`bool`
        Legacy verification flag.
    possibly_sensitive : :class:`bool`
        Whether the account's content may be sensitive.
    default_profile : :class:`bool`
        Whether the profile uses the default layout.
    default_profile_image : :class:`bool`
        Whether the avatar is the default egg image.
    has_custom_timelines : :class:`bool`
        Whether the user created custom timelines.
    followers_count : :class:`int`
        Number of followers.
    fast_followers_count : :class:`int`
        Number of "fast" followers.
    normal_followers_count : :class:`int`
        Number of regular followers.
    following_count : :class:`int`
        Number of accounts this user follows.
    favourites_count : :class:`int`
        Number of likes given.
    listed_count : :class:`int`
        Number of lists the user appears on.
    media_count : :class:`int`
        Number of media posts.
    statuses_count : :class:`int`
        Number of tweets.
    is_translator : :class:`bool`
        Whether the user is a translator.
    translator_type : :class:`str`
        Translator category.
    withheld_in_countries : list[:class:`str`]
        Countries where the account is withheld.
    """

    def __init__(self, client: GuestClient, data: dict) -> None:
        self._client = client
        profile = data['legacy']

        # Identity
        self.id: str = data['rest_id']
        self.created_at: str = profile['created_at']
        self.name: str = profile['name']
        self.screen_name: str = profile['screen_name']

        # Profile presentation
        self.profile_image_url: str = profile['profile_image_url_https']
        self.profile_banner_url: str = profile.get('profile_banner_url')
        self.url: str = profile.get('url')
        self.location: str = profile['location']
        self.description: str = profile['description']
        self.description_urls: list = profile['entities']['description']['urls']
        self.urls: list = profile['entities'].get('url', {}).get('urls')
        self.pinned_tweet_ids: list[str] = profile['pinned_tweet_ids_str']

        # Flags
        self.is_blue_verified: bool = data['is_blue_verified']
        self.verified: bool = profile['verified']
        self.possibly_sensitive: bool = profile['possibly_sensitive']
        self.default_profile: bool = profile['default_profile']
        self.default_profile_image: bool = profile['default_profile_image']
        self.has_custom_timelines: bool = profile['has_custom_timelines']
        self.is_translator: bool = profile['is_translator']
        self.translator_type: str = profile['translator_type']
        self.protected: bool = profile.get('protected', False)

        # Counters
        self.followers_count: int = profile['followers_count']
        self.fast_followers_count: int = profile['fast_followers_count']
        self.normal_followers_count: int = profile['normal_followers_count']
        self.following_count: int = profile['friends_count']
        self.favourites_count: int = profile['favourites_count']
        self.listed_count: int = profile['listed_count']
        self.media_count = profile['media_count']
        self.statuses_count: int = profile['statuses_count']
        self.withheld_in_countries: list[str] = profile['withheld_in_countries']

    @property
    def created_at_datetime(self) -> datetime:
        """`created_at` parsed into a :class:`datetime`."""
        return timestamp_to_datetime(self.created_at)

    async def get_tweets(self, tweet_type: Literal['Tweets'] = 'Tweets', count: int = 40) -> list[Tweet]:
        """
        Fetch this user's tweets.

        Parameters
        ----------
        tweet_type : {'Tweets'}, default='Tweets'
            Which timeline to fetch.
        count : :class:`int`, default=40
            How many tweets to fetch.

        Returns
        -------
        list[:class:`.tweet.Tweet`]
            The fetched tweets.

        Examples
        --------
        >>> user = await client.get_user_by_screen_name('example_user')
        >>> tweets = await user.get_tweets()
        """
        return await self._client.get_user_tweets(self.id, tweet_type, count)

    async def get_highlights_tweets(self, count: int = 20, cursor: str | None = None) -> Result[Tweet]:
        """
        Fetch tweets highlighted on this user's profile.

        Parameters
        ----------
        count : :class:`int`, default=20
            How many tweets to fetch.
        cursor : :class:`str`, default=None
            Pagination cursor.

        Returns
        -------
        Result[:class:`.tweet.Tweet`]
            Paginated highlighted tweets; call ``.next()`` for more.

        Examples
        --------
        >>> result = await user.get_highlights_tweets()
        >>> more_results = await result.next()
        """
        return await self._client.get_user_highlights_tweets(self.id, count, cursor)

    async def update(self) -> None:
        """Re-fetch this user and refresh all attributes in place."""
        fresh = await self._client.get_user_by_id(self.id)
        self.__dict__.update(fresh.__dict__)

    def __repr__(self) -> str:
        return f''

    def __eq__(self, __value: object) -> bool:
        # Users compare equal by ID only.
        return isinstance(__value, User) and self.id == __value.id

    def __ne__(self, __value: object) -> bool:
        return not self == __value
class List:
    """
    Class representing a Twitter List.

    Attributes
    ----------
    id : :class:`str`
        The unique identifier of the List.
    created_at : :class:`int`
        The timestamp (milliseconds) when the List was created.
    default_banner : :class:`dict`
        Information about the default banner of the List.
    banner : :class:`dict`
        Information about the banner of the List. If a custom banner is not
        set, it defaults to the default banner.
    description : :class:`str`
        The description of the List.
    following : :class:`bool`
        Indicates if the authenticated user is following the List.
    is_member : :class:`bool`
        Indicates if the authenticated user is a member of the List.
    member_count : :class:`int`
        The number of members in the List.
    mode : {'Private', 'Public'}
        The mode of the List, either 'Private' or 'Public'.
    muting : :class:`bool`
        Indicates if the authenticated user is muting the List.
    name : :class:`str`
        The name of the List.
    pinning : :class:`bool`
        Indicates if the List is pinned.
    subscriber_count : :class:`int`
        The number of subscribers to the List.
    """
    def __init__(self, client: Client, data: dict) -> None:
        self._client = client

        self.id: str = data['id_str']
        self.created_at: int = data['created_at']
        self.default_banner: dict = data['default_banner_media']['media_info']

        # Fall back to the default banner when no custom one is set.
        if 'custom_banner_media' in data:
            self.banner: dict = data['custom_banner_media']['media_info']
        else:
            self.banner: dict = self.default_banner

        self.description: str = data['description']
        self.following: bool = data['following']
        self.is_member: bool = data['is_member']
        # Fixed: this is a count, not a flag — was mis-annotated as bool.
        self.member_count: int = data['member_count']
        self.mode: Literal['Private', 'Public'] = data['mode']
        self.muting: bool = data['muting']
        self.name: str = data['name']
        self.pinning: bool = data['pinning']
        self.subscriber_count: int = data['subscriber_count']

    @property
    def created_at_datetime(self) -> datetime:
        """`created_at` converted to a :class:`datetime`."""
        return timestamp_to_datetime(self.created_at)

    async def edit_banner(self, media_id: str) -> Response:
        """
        Edit the banner image of the list.

        Parameters
        ----------
        media_id : :class:`str`
            The ID of the media to use as the new banner image.

        Returns
        -------
        :class:`httpx.Response`
            Response returned from twitter api.

        Examples
        --------
        >>> media_id = await client.upload_media('image.png')
        >>> await list_.edit_banner(media_id)
        """
        return await self._client.edit_list_banner(self.id, media_id)

    async def delete_banner(self) -> Response:
        """
        Deletes the list banner.

        Returns
        -------
        :class:`httpx.Response`
            Response returned from twitter api.
        """
        return await self._client.delete_list_banner(self.id)

    async def edit(
        self,
        name: str | None = None,
        description: str | None = None,
        is_private: bool | None = None
    ) -> List:
        """
        Edits list information.

        Parameters
        ----------
        name : :class:`str`, default=None
            The new name for the list.
        description : :class:`str`, default=None
            The new description for the list.
        is_private : :class:`bool`, default=None
            Indicates whether the list should be private
            (True) or public (False).

        Returns
        -------
        :class:`List`
            The updated Twitter list.

        Examples
        --------
        >>> await list_.edit('new name', 'new description', True)
        """
        return await self._client.edit_list(
            self.id, name, description, is_private
        )

    async def add_member(self, user_id: str) -> Response:
        """Adds a member to the list."""
        return await self._client.add_list_member(self.id, user_id)

    async def remove_member(self, user_id: str) -> Response:
        """Removes a member from the list."""
        return await self._client.remove_list_member(self.id, user_id)

    async def get_tweets(
        self, count: int = 20, cursor: str | None = None
    ) -> Result[Tweet]:
        """
        Retrieves tweets from the list.

        Parameters
        ----------
        count : :class:`int`, default=20
            The number of tweets to retrieve.
        cursor : :class:`str`, default=None
            The cursor for pagination.

        Returns
        -------
        Result[:class:`Tweet`]
            A Result object containing the retrieved tweets; call
            ``.next()`` on it to fetch the next page.

        Examples
        --------
        >>> tweets = await list_.get_tweets()
        >>> more_tweets = await tweets.next()
        """
        return await self._client.get_list_tweets(self.id, count, cursor)

    async def get_members(
        self, count: int = 20, cursor: str | None = None
    ) -> Result[User]:
        """Retrieves members of the list.

        Parameters
        ----------
        count : :class:`int`, default=20
            Number of members to retrieve.
        cursor : :class:`str`, default=None
            The cursor for pagination.

        Returns
        -------
        Result[:class:`User`]
            Members of the list; call ``.next()`` for the next page.

        Examples
        --------
        >>> members = await list_.get_members()
        """
        return await self._client.get_list_members(self.id, count, cursor)

    async def get_subscribers(
        self, count: int = 20, cursor: str | None = None
    ) -> Result[User]:
        """Retrieves subscribers of the list.

        Parameters
        ----------
        count : :class:`int`, default=20
            Number of subscribers to retrieve.
        cursor : :class:`str`, default=None
            The cursor for pagination.

        Returns
        -------
        Result[:class:`User`]
            Subscribers of the list; call ``.next()`` for the next page.

        Examples
        --------
        >>> subscribers = await list_.get_subscribers()
        """
        return await self._client.get_list_subscribers(self.id, count, cursor)

    async def update(self) -> None:
        """Re-fetch the list and refresh all attributes in place."""
        new = await self._client.get_list(self.id)
        self.__dict__.update(new.__dict__)

    def __eq__(self, __value: object) -> bool:
        # Lists compare equal by ID only.
        return isinstance(__value, List) and self.id == __value.id

    def __ne__(self, __value: object) -> bool:
        return not self == __value

    def __repr__(self) -> str:
        return f''
+ """ + def __init__( + self, + client: Client, + data: dict, + sender_id: str, + recipient_id: str + ) -> None: + self._client = client + self.sender_id = sender_id + self.recipient_id = recipient_id + + self.id: str = data['id'] + self.time: str = data['time'] + self.text: str = data['text'] + self.attachment: dict | None = data.get('attachment') + + async def reply(self, text: str, media_id: str | None = None) -> Message: + """Replies to the message. + + Parameters + ---------- + text : :class:`str` + The text content of the direct message. + media_id : :class:`str`, default=None + The media ID associated with any media content + to be included in the message. + Media ID can be received by using the :func:`.upload_media` method. + + Returns + ------- + :class:`Message` + `Message` object containing information about the message sent. + + See Also + -------- + Client.send_dm + """ + user_id = await self._client.user_id() + send_to = ( + self.recipient_id + if user_id == self.sender_id else + self.sender_id + ) + return await self._client.send_dm(send_to, text, media_id, self.id) + + async def add_reaction(self, emoji: str) -> Response: + """ + Adds a reaction to the message. + + Parameters + ---------- + emoji : :class:`str` + The emoji to be added as a reaction. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + """ + user_id = await self._client.user_id() + partner_id = ( + self.recipient_id + if user_id == self.sender_id else + self.sender_id + ) + conversation_id = f'{partner_id}-{user_id}' + return await self._client.add_reaction_to_message( + self.id, conversation_id, emoji + ) + + async def remove_reaction(self, emoji: str) -> Response: + """ + Removes a reaction from the message. + + Parameters + ---------- + emoji : :class:`str` + The emoji to be removed. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
+ """ + user_id = await self._client.user_id() + partner_id = ( + self.recipient_id + if user_id == self.sender_id else + self.sender_id + ) + conversation_id = f'{partner_id}-{user_id}' + return await self._client.remove_reaction_from_message( + self.id, conversation_id, emoji + ) + + async def delete(self) -> Response: + """ + Deletes the message. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.delete_dm + """ + return await self._client.delete_dm(self.id) + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, Message) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + def __repr__(self) -> str: + return f'' diff --git a/build/lib/twikit/notification.py b/build/lib/twikit/notification.py new file mode 100644 index 00000000..6f8da274 --- /dev/null +++ b/build/lib/twikit/notification.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .client.client import Client + from .tweet import Tweet + from .user import User + + +class Notification: + """ + Attributes + ---------- + id : :class:`str` + The unique identifier of the notification. + timestamp_ms : :class:`int` + The timestamp of the notification in milliseconds. + icon : :class:`dict` + Dictionary containing icon data for the notification. + message : :class:`str` + The message text of the notification. + tweet : :class:`.Tweet` + The tweet associated with the notification. + from_user : :class:`.User` + The user who triggered the notification. 
+ """ + def __init__( + self, client: Client, data: dict, tweet: Tweet, from_user: User + ) -> None: + self._client = client + self.tweet = tweet + self.from_user = from_user + + self.id: str = data['id'] + self.timestamp_ms: int = int(data['timestampMs']) + self.icon: dict = data['icon'] + self.message: str = data['message']['text'] + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, Notification) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + def __repr__(self) -> str: + return f'' diff --git a/build/lib/twikit/streaming.py b/build/lib/twikit/streaming.py new file mode 100644 index 00000000..544fb6a8 --- /dev/null +++ b/build/lib/twikit/streaming.py @@ -0,0 +1,269 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, AsyncGenerator, NamedTuple + +if TYPE_CHECKING: + from .client.client import Client + + +class StreamingSession: + """ + Represents a streaming session. + + Attributes + ---------- + id : :class:`str` + The ID or the session. + topics : set[:class:`str`] + The topics to stream. + + See Also + -------- + .Client.get_streaming_session + """ + def __init__( + self, client: Client, session_id: str, + stream: AsyncGenerator[Payload], topics: set[str], auto_reconnect: bool + ) -> None: + self._client = client + self.id = session_id + self._stream = stream + self.topics = topics + self.auto_reconnect = auto_reconnect + + async def reconnect(self) -> tuple[str, Payload]: + """ + Reconnects the session. + """ + stream = self._client._stream(self.topics) + config_event = await anext(stream) + self.id = config_event[1].config.session_id + self._stream = stream + return config_event + + async def update_subscriptions( + self, + subscribe: set[str] | None = None, + unsubscribe: set[str] | None = None + ) -> Payload: + """ + Updates subscriptions for the session. + + Parameters + ---------- + subscribe : set[:class:`str`], default=None + Topics to subscribe to. 
class StreamingSession:
    """
    A live streaming session over which topic events are received.

    Attributes
    ----------
    id : :class:`str`
        The ID or the session.
    topics : set[:class:`str`]
        The topics to stream.

    See Also
    --------
    .Client.get_streaming_session
    """
    def __init__(
        self, client: Client, session_id: str,
        stream: AsyncGenerator[Payload], topics: set[str], auto_reconnect: bool
    ) -> None:
        self._client = client
        self.id = session_id
        self._stream = stream
        self.topics = topics
        self.auto_reconnect = auto_reconnect

    async def reconnect(self) -> tuple[str, Payload]:
        """
        Reconnects the session.

        Opens a fresh stream for the same topics and adopts the new
        session ID from its initial config event.
        """
        fresh_stream = self._client._stream(self.topics)
        first_event = await anext(fresh_stream)
        self.id = first_event[1].config.session_id
        self._stream = fresh_stream
        return first_event

    async def update_subscriptions(
        self,
        subscribe: set[str] | None = None,
        unsubscribe: set[str] | None = None
    ) -> Payload:
        """
        Updates subscriptions for the session.

        Parameters
        ----------
        subscribe : set[:class:`str`], default=None
            Topics to subscribe to.
        unsubscribe : set[:class:`str`], default=None
            Topics to unsubscribe from.

        Examples
        --------
        >>> from twikit.streaming import Topic
        ...
        >>> subscribe_topics = {
        ...     Topic.tweet_engagement('1749528513'),
        ...     Topic.tweet_engagement('1765829534')
        ... }
        >>> unsubscribe_topics = {
        ...     Topic.tweet_engagement('17396176529'),
        ...     Topic.dm_update('17544932482-174455537996'),
        ...     Topic.dm_typing('17544932482-174455537996)'
        ... }
        >>> await session.update_subscriptions(
        ...     subscribe_topics, unsubscribe_topics
        ... )

        Note
        ----
        dm_update and dm_typing cannot be added.

        See Also
        --------
        .Topic
        """
        return await self._client._update_subscriptions(
            self, subscribe, unsubscribe
        )

    async def __aiter__(self) -> AsyncGenerator[tuple[str, Payload]]:
        # Drain the current stream; when it ends, either reconnect (yielding
        # the new config event) and keep going, or stop.
        while True:
            async for item in self._stream:
                yield item
            if self.auto_reconnect:
                yield await self.reconnect()
            else:
                return

    def __repr__(self) -> str:
        return f''
data['conversation_id'] + user_id = data['user_id'] + return DMUpdateEvent(conversation_id, user_id) + + if name == 'dm_typing': + conversation_id = data['conversation_id'] + user_id = data['user_id'] + return DMTypingEvent(conversation_id, user_id) + + +def _payload_from_data(data: dict) -> Payload: + events = { + name: _event_from_data(name, data) + for (name, data) in data.items() + } + return Payload(**events) + + +class Payload(NamedTuple): + """ + Represents a payload containing several types of events. + """ + config: ConfigEvent | None = None #: The configuration event. + subscriptions: SubscriptionsEvent | None = None #: The subscriptions event. + tweet_engagement: TweetEngagementEvent | None = None #: The tweet engagement event. + dm_update: DMUpdateEvent | None = None #: The direct message update event. + dm_typing: DMTypingEvent | None = None #: The direct message typing event. + + def __repr__(self) -> str: + items = self._asdict().items() + fields = [f'{i[0]}={i[1]}' for i in items if i[1] is not None] + return f'Payload({" ".join(fields)})' + + +class ConfigEvent(NamedTuple): + """ + Event representing configuration data. + """ + session_id: str #: The session ID associated with the configuration. + subscription_ttl_millis: int #: The time to live for the subscription. + heartbeat_millis: int #: The heartbeat interval in milliseconds. + + +class SubscriptionsEvent(NamedTuple): + """ + Event representing subscription status. + """ + errors: list #: A list of errors. + + +class TweetEngagementEvent(NamedTuple): + """ + Event representing tweet engagement metrics. + """ + like_count: str | None #: The number of likes on the tweet. + retweet_count: str | None #: The number of retweets of the tweet. + view_count: str | None #: The number of views of the tweet. + view_count_state: str | None #: The state of view count. + quote_count: int | None #: The number of quotes of the tweet. + reply_count: int | None #: The number of replies of the tweet. 
+ bookmark_count: int | None #: The number of bookmarks of the tweet. + + +class DMUpdateEvent(NamedTuple): + """ + Event representing a (DM) update. + """ + conversation_id: str #: The ID of the conversation associated with the DM. + user_id: str #: ID of the user who sent the DM. + + +class DMTypingEvent(NamedTuple): + """ + Event representing typing indication in a DM conversation. + """ + conversation_id: str #: The conversation where typing indication occurred. + user_id: str #: The ID of the typing user. + +StreamEventType = (ConfigEvent | SubscriptionsEvent | + TweetEngagementEvent | DMTypingEvent | DMTypingEvent) + + +class Topic: + """ + Utility class for generating topic strings for streaming. + """ + @staticmethod + def tweet_engagement(tweet_id: str) -> str: + """ + Generates a topic string for tweet engagement events. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet. + + Returns + ------- + :class:`str` + The topic string for tweet engagement events. + """ + return f'/tweet_engagement/{tweet_id}' + + @staticmethod + def dm_update(conversation_id: str) -> str: + """ + Generates a topic string for direct message update events. + + Parameters + ---------- + conversation_id : :class:`str` + The ID of the conversation. + Group ID (00000000) or partner_ID-your_ID (00000000-00000001) + + Returns + ------- + :class:`str` + The topic string for direct message update events. + """ + return f'/dm_update/{conversation_id}' + + @staticmethod + def dm_typing(conversation_id: str) -> str: + """ + Generates a topic string for direct message typing events. + + Parameters + ---------- + conversation_id : :class:`str` + The ID of the conversation. + Group ID (00000000) or partner_ID-your_ID (00000000-00000001) + + Returns + ------- + :class:`str` + The topic string for direct message typing events. 
+ """ + return f'/dm_typing/{conversation_id}' diff --git a/build/lib/twikit/trend.py b/build/lib/twikit/trend.py new file mode 100644 index 00000000..50b92999 --- /dev/null +++ b/build/lib/twikit/trend.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +from typing import TypedDict, TYPE_CHECKING + +if TYPE_CHECKING: + from .client.client import Client + + +class Trend: + """ + Attributes + ---------- + name : :class:`str` + The name of the trending topic. + tweets_count : :class:`int` + The count of tweets associated with the trend. + domain_context : :class:`str` + The context or domain associated with the trend. + grouped_trends : :class:`list`[:class:`str`] + A list of trend names grouped under the main trend. + """ + + def __init__(self, client: Client, data: dict) -> None: + self._client = client + + metadata: dict = data['trendMetadata'] + self.name: str = data['name'] + self.tweets_count: int | None = metadata.get('metaDescription') + self.domain_context: str = metadata.get('domainContext') + self.grouped_trends: list[str] = [ + trend['name'] for trend in data.get('groupedTrends', []) + ] + + def __repr__(self) -> str: + return f'' + + +class PlaceTrends(TypedDict): + trends: list[PlaceTrend] + as_of: str + created_at: str + locations: dict + + +class PlaceTrend: + """ + Attributes + ---------- + name : :class:`str` + The name of the trend. + url : :class:`str` + The URL to view the trend. + query : :class:`str` + The search query corresponding to the trend. + tweet_volume : :class:`int` + The volume of tweets associated with the trend. 
+ """ + def __init__(self, client: Client, data: dict) -> None: + self._client = client + + self.name: str = data['name'] + self.url: str = data['url'] + self.promoted_content: None = data['promoted_content'] + self.query: str = data['query'] + self.tweet_volume: int = data['tweet_volume'] + + def __repr__(self) -> str: + return f'' + + +class Location: + def __init__(self, client: Client, data: dict) -> None: + self._client = client + + self.woeid: int = data['woeid'] + self.country: str = data['country'] + self.country_code: str = data['countryCode'] + self.name: str = data['name'] + self.parentid: int = data['parentid'] + self.placeType: dict = data['placeType'] + self.url: str = data['url'] + + async def get_trends(self) -> PlaceTrends: + return await self._client.get_place_trends(self.woeid) + + def __repr__(self) -> str: + return f'' + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, Location) and self.woeid == __value.woeid + + def __ne__(self, __value: object) -> bool: + return not self == __value \ No newline at end of file diff --git a/build/lib/twikit/tweet.py b/build/lib/twikit/tweet.py new file mode 100644 index 00000000..58248f11 --- /dev/null +++ b/build/lib/twikit/tweet.py @@ -0,0 +1,694 @@ +from __future__ import annotations + +import re +from datetime import datetime +from typing import TYPE_CHECKING + +from .geo import Place +from .user import User +from .utils import find_dict, timestamp_to_datetime + +if TYPE_CHECKING: + from httpx import Response + + from .client.client import Client + from .utils import Result + + +class Tweet: + """ + Attributes + ---------- + id : :class:`str` + The unique identifier of the tweet. + created_at : :class:`str` + The date and time when the tweet was created. + created_at_datetime : :class:`datetime` + The created_at converted to datetime. + user: :class:`User` + Author of the tweet. + text : :class:`str` + The full text of the tweet. + lang : :class:`str` + The language of the tweet. 
+ in_reply_to : :class:`str` + The tweet ID this tweet is in reply to, if any + is_quote_status : :class:`bool` + Indicates if the tweet is a quote status. + quote : :class:`Tweet` | None + The Tweet being quoted (if any) + retweeted_tweet : :class:`Tweet` | None + The Tweet being retweeted (if any) + possibly_sensitive : :class:`bool` + Indicates if the tweet content may be sensitive. + possibly_sensitive_editable : :class:`bool` + Indicates if the tweet's sensitivity can be edited. + quote_count : :class:`int` + The count of quotes for the tweet. + media : :class:`list` + A list of media entities associated with the tweet. + reply_count : :class:`int` + The count of replies to the tweet. + favorite_count : :class:`int` + The count of favorites or likes for the tweet. + favorited : :class:`bool` + Indicates if the tweet is favorited. + view_count: :class:`int` | None + The count of views. + view_count_state : :class:`str` | None + The state of the tweet views. + retweet_count : :class:`int` + The count of retweets for the tweet. + place : :class:`.Place` | None + The location associated with the tweet. + editable_until_msecs : :class:`int` + The timestamp until which the tweet is editable. + is_translatable : :class:`bool` + Indicates if the tweet is translatable. + is_edit_eligible : :class:`bool` + Indicates if the tweet is eligible for editing. + edits_remaining : :class:`int` + The remaining number of edits allowed for the tweet. + replies: Result[:class:`Tweet`] | None + Replies to the tweet. + reply_to: list[:class:`Tweet`] | None + A list of Tweet objects representing the tweets to which to reply. + related_tweets : list[:class:`Tweet`] | None + Related tweets. + hashtags: list[:class:`str`] + Hashtags included in the tweet text. + has_card : :class:`bool` + Indicates if the tweet contains a card. + thumbnail_title : :class:`str` | None + The title of the webpage displayed inside tweet's card. 
+ thumbnail_url : :class:`str` | None + Link to the image displayed in the tweet's card. + urls : :class:`list` + Information about URLs contained in the tweet. + full_text : :class:`str` | None + The full text of the tweet. + bookmark_count: :class:`int` | None + The count of bookmarks for the tweet. + """ + + def __init__(self, client: Client, data: dict, user: User = None) -> None: + self._client = client + self._data = data + self.user = user + + self.replies: Result[Tweet] | None = None + self.reply_to: list[Tweet] | None = None + self.related_tweets: list[Tweet] | None = None + self.thread: list[Tweet] | None = None + + self.id: str = data['rest_id'] + legacy = data['legacy'] + self.created_at: str = legacy['created_at'] + self.text: str = legacy['full_text'] + self.lang: str = legacy['lang'] + self.is_quote_status: bool = legacy['is_quote_status'] + self.in_reply_to: str | None = self._data['legacy'].get('in_reply_to_status_id_str') + self.is_quote_status: bool = legacy['is_quote_status'] + self.possibly_sensitive: bool = legacy.get('possibly_sensitive') + self.possibly_sensitive_editable: bool = legacy.get('possibly_sensitive_editable') + self.quote_count: int = legacy['quote_count'] + self.media: list = legacy['entities'].get('media') + self.reply_count: int = legacy['reply_count'] + self.favorite_count: int = legacy['favorite_count'] + self.favorited: bool = legacy['favorited'] + self.retweet_count: int = legacy['retweet_count'] + self._place_data = legacy.get('place') + self.editable_until_msecs: int = data['edit_control'].get('editable_until_msecs') + self.is_translatable: bool = data.get('is_translatable') + self.is_edit_eligible: bool = data['edit_control'].get('is_edit_eligible') + self.edits_remaining: int = data['edit_control'].get('edits_remaining') + self.view_count: str = data['views'].get('count') if 'views' in data else None + self.view_count_state: str = data['views'].get('state') if 'views' in data else None + self.has_community_notes: bool 
= data.get('has_birdwatch_notes') + + # Get bookmark count from public_metrics if available, otherwise from legacy + public_metrics = data.get('public_metrics', {}) + self.bookmark_count: int | None = ( + public_metrics.get('bookmark_count') + if public_metrics.get('bookmark_count') is not None + else legacy.get('bookmark_count') + ) + + if data.get('quoted_status_result'): + quoted_tweet = data.pop('quoted_status_result')['result'] + if 'tweet' in quoted_tweet: + quoted_tweet = quoted_tweet['tweet'] + if quoted_tweet.get('__typename') != 'TweetTombstone': + quoted_user = User(client, quoted_tweet['core']['user_results']['result']) + self.quote: Tweet = Tweet(client, quoted_tweet, quoted_user) + else: + self.quote = None + + if legacy.get('retweeted_status_result'): + retweeted_tweet = legacy.pop('retweeted_status_result')['result'] + if 'tweet' in retweeted_tweet: + retweeted_tweet = retweeted_tweet['tweet'] + retweeted_user = User( + client, retweeted_tweet['core']['user_results']['result'] + ) + self.retweeted_tweet: Tweet = Tweet( + client, retweeted_tweet, retweeted_user + ) + else: + self.retweeted_tweet = None + + note_tweet_results = find_dict(data, 'note_tweet_results', find_one=True) + self.full_text: str = self.text + if note_tweet_results: + text_list = find_dict(note_tweet_results, 'text', find_one=True) + if text_list: + self.full_text = text_list[0] + + entity_set = note_tweet_results[0]['result']['entity_set'] + self.urls: list = entity_set.get('urls') + hashtags = entity_set.get('hashtags', []) + else: + self.urls: list = legacy['entities'].get('urls') + hashtags = legacy['entities'].get('hashtags', []) + + self.hashtags: list[str] = [ + i['text'] for i in hashtags + ] + + self.community_note = None + if 'birdwatch_pivot' in data: + community_note_data = data['birdwatch_pivot'] + if 'note' in community_note_data: + self.community_note = { + 'id': community_note_data['note']['rest_id'], + 'text': community_note_data['subtitle']['text'] + } + + if ( 
+ 'card' in data and + 'legacy' in data['card'] and + 'name' in data['card']['legacy'] and + data['card']['legacy']['name'].startswith('poll') + ): + self._poll_data = data['card'] + else: + self._poll_data = None + + self.thumbnail_url = None + self.thumbnail_title = None + self.has_card = 'card' in data + if ( + 'card' in data and + 'legacy' in data['card'] and + 'binding_values' in data['card']['legacy'] + ): + card_data = data['card']['legacy']['binding_values'] + + if isinstance(card_data, list): + binding_values = { + i.get('key'): i.get('value') + for i in card_data + } + + if 'title' in binding_values and 'string_value' in binding_values['title']: + self.thumbnail_title = binding_values['title']['string_value'] + + if ( + 'thumbnail_image_original' in binding_values and + 'image_value' in binding_values['thumbnail_image_original'] and + 'url' in binding_values['thumbnail_image_original' + ]['image_value'] + ): + self.thumbnail_url = binding_values['thumbnail_image_original' + ]['image_value']['url'] + + @property + def created_at_datetime(self) -> datetime: + return timestamp_to_datetime(self.created_at) + + @property + def poll(self) -> Poll: + return self._poll_data and Poll(self._client, self._poll_data, self) + + @property + def place(self) -> Place: + return self._place_data and Place(self._client, self._place_data) + + async def delete(self) -> Response: + """Deletes the tweet. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + Examples + -------- + >>> await tweet.delete() + """ + return await self._client.delete_tweet(self.id) + + async def favorite(self) -> Response: + """ + Favorites the tweet. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.favorite_tweet + """ + return await self._client.favorite_tweet(self.id) + + async def unfavorite(self) -> Response: + """ + Favorites the tweet. 
+ + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.unfavorite_tweet + """ + return await self._client.unfavorite_tweet(self.id) + + async def retweet(self) -> Response: + """ + Retweets the tweet. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.retweet + """ + return await self._client.retweet(self.id) + + async def delete_retweet(self) -> Response: + """ + Deletes the retweet. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.delete_retweet + """ + return await self._client.delete_retweet(self.id) + + async def bookmark(self) -> Response: + """ + Adds the tweet to bookmarks. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.bookmark_tweet + """ + return await self._client.bookmark_tweet(self.id) + + async def delete_bookmark(self) -> Response: + """ + Removes the tweet from bookmarks. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.delete_bookmark + """ + return await self._client.delete_bookmark(self.id) + + async def reply( + self, + text: str = '', + media_ids: list[str] | None = None, + **kwargs + ) -> Tweet: + """ + Replies to the tweet. + + Parameters + ---------- + text : :class:`str`, default='' + The text content of the reply. + media_ids : list[:class:`str`], default=None + A list of media IDs or URIs to attach to the reply. + Media IDs can be obtained by using the `upload_media` method. + + Returns + ------- + :class:`Tweet` + The created tweet. + + Examples + -------- + >>> tweet_text = 'Example text' + >>> media_ids = [ + ... client.upload_media('image1.png'), + ... client.upload_media('image2.png') + ... ] + >>> await tweet.reply( + ... tweet_text, + ... media_ids=media_ids + ... 
) + + See Also + -------- + `Client.upload_media` + """ + return await self._client.create_tweet( + text, media_ids, reply_to=self.id, **kwargs + ) + + async def get_retweeters( + self, count: str = 40, cursor: str | None = None + ) -> Result[User]: + """ + Retrieve users who retweeted the tweet. + + Parameters + ---------- + count : :class:`int`, default=40 + The maximum number of users to retrieve. + cursor : :class:`str`, default=None + A string indicating the position of the cursor for pagination. + + Returns + ------- + Result[:class:`User`] + A list of users who retweeted the tweet. + + Examples + -------- + >>> tweet_id = '...' + >>> retweeters = tweet.get_retweeters() + >>> print(retweeters) + [, , ..., ] + + >>> more_retweeters = retweeters.next() # Retrieve more retweeters. + >>> print(more_retweeters) + [, , ..., ] + """ + return await self._client.get_retweeters(self.id, count, cursor) + + async def get_favoriters( + self, count: str = 40, cursor: str | None = None + ) -> Result[User]: + """ + Retrieve users who favorited a specific tweet. + + Parameters + ---------- + tweet_id : :class:`str` + The ID of the tweet. + count : :class:`int`, default=40 + The maximum number of users to retrieve. + cursor : :class:`str`, default=None + A string indicating the position of the cursor for pagination. + + Returns + ------- + Result[:class:`User`] + A list of users who favorited the tweet. + + Examples + -------- + >>> tweet_id = '...' + >>> favoriters = tweet.get_favoriters() + >>> print(favoriters) + [, , ..., ] + + >>> more_favoriters = favoriters.next() # Retrieve more favoriters. + >>> print(more_favoriters) + [, , ..., ] + """ + return await self._client.get_favoriters(self.id, count, cursor) + + async def get_similar_tweets(self) -> list[Tweet]: + """ + Retrieves tweets similar to the tweet (Twitter premium only). + + Returns + ------- + list[:class:`Tweet`] + A list of Tweet objects representing tweets + similar to the tweet. 
+ """ + return await self._client.get_similar_tweets(self.id) + + async def update(self) -> None: + new = await self._client.get_tweet_by_id(self.id) + self.__dict__.update(new.__dict__) + + def __repr__(self) -> str: + return f'' + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, Tweet) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + +def tweet_from_data(client: Client, data: dict) -> Tweet: + ':meta private:' + tweet_data_ = find_dict(data, 'result', True) + if not tweet_data_: + return None + tweet_data = tweet_data_[0] + + if tweet_data.get('__typename') == 'TweetTombstone': + return None + if 'tweet' in tweet_data: + tweet_data = tweet_data['tweet'] + if 'core' not in tweet_data: + return None + if 'result' not in tweet_data['core']['user_results']: + return None + if 'legacy' not in tweet_data: + return None + + user_data = tweet_data['core']['user_results']['result'] + return Tweet(client, tweet_data, User(client, user_data)) + + +class ScheduledTweet: + def __init__(self, client: Client, data: dict) -> None: + self._client = client + + self.id = data['rest_id'] + self.execute_at: int = data['scheduling_info']['execute_at'] + self.state: str = data['scheduling_info']['state'] + self.type: str = data['tweet_create_request']['type'] + self.text: str = data['tweet_create_request']['status'] + self.media = [i['media_info'] for i in data.get('media_entities', [])] + + async def delete(self) -> Response: + """ + Delete the scheduled tweet. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
+ """ + return await self._client.delete_scheduled_tweet(self.id) + + def __repr__(self) -> str: + return f'' + + +class TweetTombstone: + def __init__(self, client: Client, tweet_id: str, data: dict) -> None: + self._client = client + self.id = tweet_id + self.text: str = data['text']['text'] + + def __repr__(self) -> str: + return f'' + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, TweetTombstone) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + +class Poll: + """Represents a poll associated with a tweet. + Attributes + ---------- + tweet : :class:`Tweet` + The tweet associated with the poll. + id : :class:`str` + The unique identifier of the poll. + name : :class:`str` + The name of the poll. + choices : list[:class:`dict`] + A list containing dictionaries representing poll choices. + Each dictionary contains 'label' and 'count' keys + for choice label and count. + duration_minutes : :class:`int` + The duration of the poll in minutes. + end_datetime_utc : :class:`str` + The end date and time of the poll in UTC format. + last_updated_datetime_utc : :class:`str` + The last updated date and time of the poll in UTC format. + selected_choice : :class:`str` | None + Number of the selected choice. 
+ """ + + def __init__( + self, client: Client, data: dict, tweet: Tweet | None = None + ) -> None: + self._client = client + self.tweet = tweet + + legacy = data['legacy'] + binding_values = legacy['binding_values'] + + if isinstance(legacy['binding_values'], list): + binding_values = { + i.get('key'): i.get('value') + for i in legacy['binding_values'] + } + + self.id: str = data['rest_id'] + self.name: str = legacy['name'] + + choices_number = int(re.findall( + r'poll(\d)choice_text_only', self.name + )[0]) + choices = [] + + for i in range(1, choices_number + 1): + choice_label = binding_values[f'choice{i}_label'] + choice_count = binding_values.get(f'choice{i}_count', {}) + choices.append({ + 'number': str(i), + 'label': choice_label['string_value'], + 'count': choice_count.get('string_value', '0') + }) + + self.choices = choices + + self.duration_minutes = int(binding_values['duration_minutes']['string_value']) + self.end_datetime_utc: str = binding_values['end_datetime_utc']['string_value'] + updated = binding_values['last_updated_datetime_utc']['string_value'] + self.last_updated_datetime_utc: str = updated + + self.counts_are_final: bool = binding_values['counts_are_final']['boolean_value'] + + if 'selected_choice' in binding_values: + self.selected_choice: str = binding_values['selected_choice']['string_value'] + else: + self.selected_choice = None + + async def vote(self, selected_choice: str) -> Poll: + """ + Vote on the poll with the specified selected choice. + Parameters + ---------- + selected_choice : :class:`str` + The label of the selected choice for the vote. + Returns + ------- + :class:`Poll` + The Poll object representing the updated poll after voting. 
+ """ + return await self._client.vote( + selected_choice, + self.id, + self.tweet.id, + self.name + ) + + def __repr__(self) -> str: + return f'' + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, Poll) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value + + +class CommunityNote: + """Represents a community note. + + Attributes + ---------- + id : :class:`str` + The ID of the community note. + text : :class:`str` + The text content of the community note. + misleading_tags : list[:class:`str`] + A list of tags indicating misleading information. + trustworthy_sources : :class:`bool` + Indicates if the sources are trustworthy. + helpful_tags : list[:class:`str`] + A list of tags indicating helpful information. + created_at : :class:`int` + The timestamp when the note was created. + can_appeal : :class:`bool` + Indicates if the note can be appealed. + appeal_status : :class:`str` + The status of the appeal. + is_media_note : :class:`bool` + Indicates if the note is related to media content. + media_note_matches : :class:`str` + Matches related to media content. + birdwatch_profile : :class:`dict` + Birdwatch profile associated with the note. + tweet_id : :class:`str` + The ID of the tweet associated with the note. 
+ """ + def __init__(self, client: Client, data: dict) -> None: + self._client = client + self.id: str = data['rest_id'] + + data_v1 = data['data_v1'] + self.text: str = data_v1['summary']['text'] + self.misleading_tags: list[str] = data_v1.get('misleading_tags') + self.trustworthy_sources: bool = data_v1.get('trustworthy_sources') + self.helpful_tags: list[str] = data.get('helpful_tags') + self.created_at: int = data.get('created_at') + self.can_appeal: bool = data.get('can_appeal') + self.appeal_status: str = data.get('appeal_status') + self.is_media_note: bool = data.get('is_media_note') + self.media_note_matches: str = data.get('media_note_matches') + self.birdwatch_profile: dict = data.get('birdwatch_profile') + self.tweet_id: str = data['tweet_results']['result']['rest_id'] + + async def update(self) -> None: + new = await self._client.get_community_note(self.id) + self.__dict__.update(new.__dict__) + + def __repr__(self) -> str: + return f'' + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, CommunityNote) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value diff --git a/build/lib/twikit/user.py b/build/lib/twikit/user.py new file mode 100644 index 00000000..5535a346 --- /dev/null +++ b/build/lib/twikit/user.py @@ -0,0 +1,521 @@ +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING, Literal + +from .utils import timestamp_to_datetime + +if TYPE_CHECKING: + from httpx import Response + + from .client.client import Client + from .message import Message + from .tweet import Tweet + from .utils import Result + + +class User: + """ + Attributes + ---------- + id : :class:`str` + The unique identifier of the user. + created_at : :class:`str` + The date and time when the user account was created. + name : :class:`str` + The user's name. + screen_name : :class:`str` + The user's screen name. 
+ profile_image_url : :class:`str` + The URL of the user's profile image (HTTPS version). + profile_banner_url : :class:`str` + The URL of the user's profile banner. + url : :class:`str` + The user's URL. + location : :class:`str` + The user's location information. + description : :class:`str` + The user's profile description. + description_urls : :class:`list` + URLs found in the user's profile description. + urls : :class:`list` + URLs associated with the user. + pinned_tweet_ids : :class:`str` + The IDs of tweets that the user has pinned to their profile. + is_blue_verified : :class:`bool` + Indicates if the user is verified with a blue checkmark. + verified : :class:`bool` + Indicates if the user is verified. + possibly_sensitive : :class:`bool` + Indicates if the user's content may be sensitive. + can_dm : :class:`bool` + Indicates whether the user can receive direct messages. + can_media_tag : :class:`bool` + Indicates whether the user can be tagged in media. + want_retweets : :class:`bool` + Indicates if the user wants retweets. + default_profile : :class:`bool` + Indicates if the user has the default profile. + default_profile_image : :class:`bool` + Indicates if the user has the default profile image. + has_custom_timelines : :class:`bool` + Indicates if the user has custom timelines. + followers_count : :class:`int` + The count of followers. + fast_followers_count : :class:`int` + The count of fast followers. + normal_followers_count : :class:`int` + The count of normal followers. + following_count : :class:`int` + The count of users the user is following. + favourites_count : :class:`int` + The count of favorites or likes. + listed_count : :class:`int` + The count of lists the user is a member of. + media_count : :class:`int` + The count of media items associated with the user. + statuses_count : :class:`int` + The count of tweets. + is_translator : :class:`bool` + Indicates if the user is a translator. 
+ translator_type : :class:`str` + The type of translator. + profile_interstitial_type : :class:`str` + The type of profile interstitial. + withheld_in_countries : list[:class:`str`] + Countries where the user's content is withheld. + """ + + def __init__(self, client: Client, data: dict) -> None: + self._client = client + legacy = data['legacy'] + + self.id: str = data['rest_id'] + self.created_at: str = legacy['created_at'] + self.name: str = legacy['name'] + self.screen_name: str = legacy['screen_name'] + self.profile_image_url: str = legacy['profile_image_url_https'] + self.profile_banner_url: str = legacy.get('profile_banner_url') + self.url: str = legacy.get('url') + self.location: str = legacy['location'] + self.description: str = legacy['description'] + self.description_urls: list = legacy['entities']['description']['urls'] + self.urls: list = legacy['entities'].get('url', {}).get('urls') + self.pinned_tweet_ids: list[str] = legacy['pinned_tweet_ids_str'] + self.is_blue_verified: bool = data['is_blue_verified'] + self.verified: bool = legacy['verified'] + self.possibly_sensitive: bool = legacy['possibly_sensitive'] + self.can_dm: bool = legacy['can_dm'] + self.can_media_tag: bool = legacy['can_media_tag'] + self.want_retweets: bool = legacy['want_retweets'] + self.default_profile: bool = legacy['default_profile'] + self.default_profile_image: bool = legacy['default_profile_image'] + self.has_custom_timelines: bool = legacy['has_custom_timelines'] + self.followers_count: int = legacy['followers_count'] + self.fast_followers_count: int = legacy['fast_followers_count'] + self.normal_followers_count: int = legacy['normal_followers_count'] + self.following_count: int = legacy['friends_count'] + self.favourites_count: int = legacy['favourites_count'] + self.listed_count: int = legacy['listed_count'] + self.media_count = legacy['media_count'] + self.statuses_count: int = legacy['statuses_count'] + self.is_translator: bool = legacy['is_translator'] + 
self.translator_type: str = legacy['translator_type'] + self.withheld_in_countries: list[str] = legacy['withheld_in_countries'] + self.protected: bool = legacy.get('protected', False) + + @property + def created_at_datetime(self) -> datetime: + return timestamp_to_datetime(self.created_at) + + async def get_tweets( + self, + tweet_type: Literal['Tweets', 'Replies', 'Media', 'Likes'], + count: int = 40, + ) -> Result[Tweet]: + """ + Retrieves the user's tweets. + + Parameters + ---------- + tweet_type : {'Tweets', 'Replies', 'Media', 'Likes'} + The type of tweets to retrieve. + count : :class:`int`, default=40 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`Tweet`] + A Result object containing a list of `Tweet` objects. + + Examples + -------- + >>> user = await client.get_user_by_screen_name('example_user') + >>> tweets = await user.get_tweets('Tweets', count=20) + >>> for tweet in tweets: + ... print(tweet) + + + ... + ... + + >>> more_tweets = await tweets.next() # Retrieve more tweets + >>> for tweet in more_tweets: + ... print(tweet) + + + ... + ... + """ + return await self._client.get_user_tweets(self.id, tweet_type, count) + + async def follow(self) -> Response: + """ + Follows the user. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.follow_user + """ + return await self._client.follow_user(self.id) + + async def unfollow(self) -> Response: + """ + Unfollows the user. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + Client.unfollow_user + """ + return await self._client.unfollow_user(self.id) + + async def block(self) -> Response: + """ + Blocks a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to block. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. 
+ + See Also + -------- + .unblock + """ + return await self._client.block_user(self.id) + + async def unblock(self) -> Response: + """ + Unblocks a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to unblock. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + .block + """ + return await self._client.unblock_user(self.id) + + async def mute(self) -> Response: + """ + Mutes a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to mute. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + .unmute + """ + return await self._client.mute_user(self.id) + + async def unmute(self) -> Response: + """ + Unmutes a user. + + Parameters + ---------- + user_id : :class:`str` + The ID of the user to unmute. + + Returns + ------- + :class:`httpx.Response` + Response returned from twitter api. + + See Also + -------- + .mute + """ + return await self._client.unmute_user(self.id) + + async def get_followers(self, count: int = 20) -> Result[User]: + """ + Retrieves a list of followers for the user. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of followers to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the followers. + + See Also + -------- + Client.get_user_followers + """ + return await self._client.get_user_followers(self.id, count) + + async def get_verified_followers(self, count: int = 20) -> Result[User]: + """ + Retrieves a list of verified followers for the user. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of verified followers to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the verified followers. 
+ + See Also + -------- + Client.get_user_verified_followers + """ + return await self._client.get_user_verified_followers(self.id, count) + + async def get_followers_you_know(self, count: int = 20) -> Result[User]: + """ + Retrieves a list of followers whom the user might know. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of followers you might know to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the followers you might know. + + See Also + -------- + Client.get_user_followers_you_know + """ + return await self._client.get_user_followers_you_know(self.id, count) + + async def get_following(self, count: int = 20) -> Result[User]: + """ + Retrieves a list of users whom the user is following. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of following users to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the users being followed. + + See Also + -------- + Client.get_user_following + """ + return await self._client.get_user_following(self.id, count) + + async def get_subscriptions(self, count: int = 20) -> Result[User]: + """ + Retrieves a list of users whom the user is subscribed to. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of subscriptions to retrieve. + + Returns + ------- + Result[:class:`User`] + A list of User objects representing the subscribed users. + + See Also + -------- + Client.get_user_subscriptions + """ + return await self._client.get_user_subscriptions(self.id, count) + + async def get_latest_followers( + self, count: int | None = None, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves the latest followers. 
+ Max count : 200 + """ + return await self._client.get_latest_followers( + self.id, count=count, cursor=cursor + ) + + async def get_latest_friends( + self, count: int | None = None, cursor: str | None = None + ) -> Result[User]: + """ + Retrieves the latest friends (following users). + Max count : 200 + """ + return await self._client.get_latest_friends( + self.id, count=count, cursor=cursor + ) + + async def send_dm( + self, text: str, media_id: str = None, reply_to = None + ) -> Message: + """ + Send a direct message to the user. + + Parameters + ---------- + text : :class:`str` + The text content of the direct message. + media_id : :class:`str`, default=None + The media ID associated with any media content + to be included in the message. + Media ID can be received by using the :func:`.upload_media` method. + reply_to : :class:`str`, default=None + Message ID to reply to. + + Returns + ------- + :class:`Message` + `Message` object containing information about the message sent. + + Examples + -------- + >>> # send DM with media + >>> media_id = await client.upload_media('image.png') + >>> message = await user.send_dm('text', media_id) + >>> print(message) + + + See Also + -------- + Client.upload_media + Client.send_dm + """ + return await self._client.send_dm(self.id, text, media_id, reply_to) + + async def get_dm_history(self, max_id: str = None) -> Result[Message]: + """ + Retrieves the DM conversation history with the user. + + Parameters + ---------- + max_id : :class:`str`, default=None + If specified, retrieves messages older than the specified max_id. + + Returns + ------- + Result[:class:`Message`] + A Result object containing a list of Message objects representing + the DM conversation history. + + Examples + -------- + >>> messages = await user.get_dm_history() + >>> for message in messages: + >>> print(message) + + + ... + ... 
+ + >>> more_messages = await messages.next() # Retrieve more messages + >>> for message in more_messages: + >>> print(message) + + + ... + ... + """ + return await self._client.get_dm_history(self.id, max_id) + + async def get_highlights_tweets(self, count: int = 20, cursor: str | None = None) -> Result[Tweet]: + """ + Retrieves highlighted tweets from the user's timeline. + + Parameters + ---------- + count : :class:`int`, default=20 + The number of tweets to retrieve. + + Returns + ------- + Result[:class:`Tweet`] + An instance of the `Result` class containing the highlighted tweets. + + Examples + -------- + >>> result = await user.get_highlights_tweets() + >>> for tweet in result: + ... print(tweet) + + + ... + ... + + >>> more_results = await result.next() # Retrieve more highlighted tweets + >>> for tweet in more_results: + ... print(tweet) + + + ... + ... + """ + return await self._client.get_user_highlights_tweets(self.id, count, cursor) + + async def update(self) -> None: + new = await self._client.get_user_by_id(self.id) + self.__dict__.update(new.__dict__) + + def __repr__(self) -> str: + return f'' + + def __eq__(self, __value: object) -> bool: + return isinstance(__value, User) and self.id == __value.id + + def __ne__(self, __value: object) -> bool: + return not self == __value diff --git a/build/lib/twikit/utils.py b/build/lib/twikit/utils.py new file mode 100644 index 00000000..62cfa752 --- /dev/null +++ b/build/lib/twikit/utils.py @@ -0,0 +1,394 @@ +from __future__ import annotations + +import base64 +import json +from datetime import datetime +from httpx import AsyncHTTPTransport +from typing import TYPE_CHECKING, Any, Awaitable, Generic, Iterator, Literal, TypedDict, TypeVar + +if TYPE_CHECKING: + from .client.client import Client + +T = TypeVar('T') + + +class Result(Generic[T]): + """ + This class is for storing multiple results. + The `next` method can be used to retrieve further results. 
+ As with a regular list, you can access elements by + specifying indexes and iterate over elements using a for loop. + + Attributes + ---------- + next_cursor : :class:`str` + Cursor used to obtain the next result. + previous_cursor : :class:`str` + Cursor used to obtain the previous result. + token : :class:`str` + Alias of `next_cursor`. + cursor : :class:`str` + Alias of `next_cursor`. + """ + + def __init__( + self, + results: list[T], + fetch_next_result: Awaitable | None = None, + next_cursor: str | None = None, + fetch_previous_result: Awaitable | None = None, + previous_cursor: str | None = None + ) -> None: + self.__results = results + self.next_cursor = next_cursor + self.__fetch_next_result = fetch_next_result + self.previous_cursor = previous_cursor + self.__fetch_previous_result = fetch_previous_result + + async def next(self) -> Result[T]: + """ + The next result. + """ + if self.__fetch_next_result is None: + return Result([]) + return await self.__fetch_next_result() + + async def previous(self) -> Result[T]: + """ + The previous result. 
+ """ + if self.__fetch_previous_result is None: + return Result([]) + return await self.__fetch_previous_result() + + @classmethod + def empty(cls): + return cls([]) + + def __iter__(self) -> Iterator[T]: + yield from self.__results + + def __getitem__(self, index: int) -> T: + return self.__results[index] + + def __len__(self) -> int: + return len(self.__results) + + def __repr__(self) -> str: + return self.__results.__repr__() + + +class Flow: + def __init__(self, client: Client, guest_token: str) -> None: + self._client = client + self.guest_token = guest_token + self.response = None + + async def execute_task(self, *subtask_inputs, **kwargs) -> None: + response, _ = await self._client.v11.onboarding_task( + self.guest_token, self.token, list(subtask_inputs), **kwargs + ) + self.response = response + + async def sso_init(self, provider: str) -> None: + await self._client.v11.sso_init(provider, self.guest_token) + + @property + def token(self) -> str | None: + if self.response is None: + return None + return self.response.get('flow_token') + + @property + def task_id(self) -> str | None: + if self.response is None: + return None + if len(self.response['subtasks']) <= 0: + return None + return self.response['subtasks'][0]['subtask_id'] + + +def find_dict(obj: list | dict, key: str | int, find_one: bool = False) -> list[Any]: + """ + Retrieves elements from a nested dictionary. 
+ """ + results = [] + if isinstance(obj, dict): + if key in obj: + results.append(obj.get(key)) + if find_one: + return results + if isinstance(obj, (list, dict)): + for elem in (obj if isinstance(obj, list) else obj.values()): + r = find_dict(elem, key, find_one) + results += r + if r and find_one: + return results + return results + + +def httpx_transport_to_url(transport: AsyncHTTPTransport) -> str: + url = transport._pool._proxy_url + scheme = url.scheme.decode() + host = url.host.decode() + port = url.port + auth = None + if transport._pool._proxy_headers: + auth_header = dict(transport._pool._proxy_headers)[b'Proxy-Authorization'].decode() + auth = base64.b64decode(auth_header.split()[1]).decode() + + url_str = f'{scheme}://' + if auth is not None: + url_str += auth + '@' + url_str += host + if port is not None: + url_str += f':{port}' + return url_str + + +def get_query_id(url: str) -> str: + """ + Extracts the identifier from a URL. + + Examples + -------- + >>> get_query_id('https://twitter.com/i/api/graphql/queryid/...') + 'queryid' + """ + return url.rsplit('/', 2)[-2] + + +def timestamp_to_datetime(timestamp: str) -> datetime: + return datetime.strptime(timestamp, '%a %b %d %H:%M:%S %z %Y') + + +def build_tweet_data(raw_data: dict) -> dict: + return { + **raw_data, + 'rest_id': raw_data['id'], + 'is_translatable': None, + 'views': {}, + 'edit_control': {}, + 'legacy': { + 'created_at': raw_data.get('created_at'), + 'full_text': raw_data.get('full_text') or raw_data.get('text'), + 'lang': raw_data.get('lang'), + 'is_quote_status': raw_data.get('is_quote_status'), + 'in_reply_to_status_id_str': raw_data.get('in_reply_to_status_id_str'), + 'retweeted_status_result': raw_data.get('retweeted_status_result'), + 'possibly_sensitive': raw_data.get('possibly_sensitive'), + 'possibly_sensitive_editable': raw_data.get('possibly_sensitive_editable'), + 'quote_count': raw_data.get('quote_count'), + 'entities': raw_data.get('entities'), + 'reply_count': 
raw_data.get('reply_count'), + 'favorite_count': raw_data.get('favorite_count'), + 'favorited': raw_data.get('favorited'), + 'retweet_count': raw_data.get('retweet_count') + } + } + + +def build_user_data(raw_data: dict) -> dict: + return { + **raw_data, + 'rest_id': raw_data['id'], + 'is_blue_verified': raw_data.get('ext_is_blue_verified'), + 'legacy': { + 'created_at': raw_data.get('created_at'), + 'name': raw_data.get('name'), + 'screen_name': raw_data.get('screen_name'), + 'profile_image_url_https': raw_data.get('profile_image_url_https'), + 'location': raw_data.get('location'), + 'description': raw_data.get('description'), + 'entities': raw_data.get('entities'), + 'pinned_tweet_ids_str': raw_data.get('pinned_tweet_ids_str'), + 'verified': raw_data.get('verified'), + 'possibly_sensitive': raw_data.get('possibly_sensitive'), + 'can_dm': raw_data.get('can_dm'), + 'can_media_tag': raw_data.get('can_media_tag'), + 'want_retweets': raw_data.get('want_retweets'), + 'default_profile': raw_data.get('default_profile'), + 'default_profile_image': raw_data.get('default_profile_image'), + 'has_custom_timelines': raw_data.get('has_custom_timelines'), + 'followers_count': raw_data.get('followers_count'), + 'fast_followers_count': raw_data.get('fast_followers_count'), + 'normal_followers_count': raw_data.get('normal_followers_count'), + 'friends_count': raw_data.get('friends_count'), + 'favourites_count': raw_data.get('favourites_count'), + 'listed_count': raw_data.get('listed_count'), + 'media_count': raw_data.get('media_count'), + 'statuses_count': raw_data.get('statuses_count'), + 'is_translator': raw_data.get('is_translator'), + 'translator_type': raw_data.get('translator_type'), + 'withheld_in_countries': raw_data.get('withheld_in_countries'), + 'url': raw_data.get('url'), + 'profile_banner_url': raw_data.get('profile_banner_url') + } + } + + +def flatten_params(params: dict) -> dict: + flattened_params = {} + for key, value in params.items(): + if isinstance(value, 
+        - exclude_keywords: list[str]
+            A list of keywords that the tweet must not contain.
+        - hashtags: list[str]
+            List of hashtags to include in the search query.
+        - from_user: str
+            Specify a username. Only tweets from this user will
+            be included in the search.
Only tweets containing these URLs will be + included in the search. + - since: str + Specify a date (formatted as 'YYYY-MM-DD'). Only tweets since + this date will be included in the search. + - until: str + Specify a date (formatted as 'YYYY-MM-DD'). Only tweets until + this date will be included in the search. + - positive: bool + Include positive sentiment in the search. + - negative: bool + Include negative sentiment in the search. + - question: bool + Search for tweets in questionable form. + + https://developer.twitter.com/en/docs/twitter-api/v1/rules-and-filtering/search-operators + + Returns + ------- + str + The constructed Twitter search query. + """ + if exact_phrases := options.get('exact_phrases'): + text += ' ' + ' '.join( + [f'"{i}"' for i in exact_phrases] + ) + + if or_keywords := options.get('or_keywords'): + text += ' ' + ' OR '.join(or_keywords) + + if exclude_keywords := options.get('exclude_keywords'): + text += ' ' + ' '.join( + [f'-"{i}"' for i in exclude_keywords] + ) + + if hashtags := options.get('hashtags'): + text += ' ' + ' '.join( + [f'#{i}' for i in hashtags] + ) + + if from_user := options.get('from_user'): + text +=f' from:{from_user}' + + if to_user := options.get('to_user'): + text += f' to:{to_user}' + + if mentioned_users := options.get('mentioned_users'): + text += ' ' + ' '.join( + [f'@{i}' for i in mentioned_users] + ) + + if filters := options.get('filters'): + text += ' ' + ' '.join( + [f'filter:{i}' for i in filters] + ) + + if exclude_filters := options.get('exclude_filters'): + text += ' ' + ' '.join( + [f'-filter:{i}' for i in exclude_filters] + ) + + if urls := options.get('urls'): + text += ' ' + ' '.join( + [f'url:{i}' for i in urls] + ) + + if since := options.get('since'): + text += f' since:{since}' + + if until := options.get('until'): + text += f' until:{until}' + + if options.get('positive') is True: + text += ' :)' + + if options.get('negative') is True: + text += ' :(' + + if options.get('question') is True: 
+ text += ' ?' + + return text diff --git a/build/lib/twikit/x_client_transaction/__init__.py b/build/lib/twikit/x_client_transaction/__init__.py new file mode 100644 index 00000000..2643a6b0 --- /dev/null +++ b/build/lib/twikit/x_client_transaction/__init__.py @@ -0,0 +1,8 @@ +''' +This project includes code from the following open-source project: +https://github.com/iSarabjitDhiman/TweeterPy + +We deeply appreciate the efforts of the original author. +''' + +from .transaction import ClientTransaction diff --git a/build/lib/twikit/x_client_transaction/cubic_curve.py b/build/lib/twikit/x_client_transaction/cubic_curve.py new file mode 100644 index 00000000..13fcc2ab --- /dev/null +++ b/build/lib/twikit/x_client_transaction/cubic_curve.py @@ -0,0 +1,48 @@ +from typing import Union, List + + +class Cubic: + def __init__(self, curves: List[Union[float, int]]): + self.curves = curves + + def get_value(self, time: Union[float, int]): + start_gradient = end_gradient = start = mid = 0.0 + end = 1.0 + + if time <= 0.0: + if self.curves[0] > 0.0: + start_gradient = self.curves[1] / self.curves[0] + elif self.curves[1] == 0.0 and self.curves[2] > 0.0: + start_gradient = self.curves[3] / self.curves[2] + return start_gradient * time + + if time >= 1.0: + if self.curves[2] < 1.0: + end_gradient = (self.curves[3] - 1.0) / (self.curves[2] - 1.0) + elif self.curves[2] == 1.0 and self.curves[0] < 1.0: + end_gradient = (self.curves[1] - 1.0) / (self.curves[0] - 1.0) + return 1.0 + end_gradient * (time - 1.0) + + while start < end: + mid = (start + end) / 2 + x_est = self.calculate(self.curves[0], self.curves[2], mid) + if abs(time - x_est) < 0.00001: + return self.calculate(self.curves[1], self.curves[3], mid) + if x_est < time: + start = mid + else: + end = mid + return self.calculate(self.curves[1], self.curves[3], mid) + + @staticmethod + def calculate(a, b, m): + return 3.0 * a * (1 - m) * (1 - m) * m + 3.0 * b * (1 - m) * m * m + m * m * m + +# Example usage: +# cubic_instance 
= Cubic([0.1, 0.2, 0.3, 0.4]) +# value = cubic_instance.get_value(0.5) +# print(value) + + +if __name__ == "__main__": + pass diff --git a/build/lib/twikit/x_client_transaction/interpolate.py b/build/lib/twikit/x_client_transaction/interpolate.py new file mode 100644 index 00000000..4a879a2d --- /dev/null +++ b/build/lib/twikit/x_client_transaction/interpolate.py @@ -0,0 +1,23 @@ +from typing import Union, List + + +def interpolate(from_list: List[Union[float, int]], to_list: List[Union[float, int]], f: Union[float, int]): + if len(from_list) != len(to_list): + raise Exception( + f"Mismatched interpolation arguments {from_list}: {to_list}") + out = [] + for i in range(len(from_list)): + out.append(interpolate_num(from_list[i], to_list[i], f)) + return out + + +def interpolate_num(from_val: List[Union[float, int]], to_val: List[Union[float, int]], f: Union[float, int]): + if all([isinstance(number, (int, float)) for number in [from_val, to_val]]): + return from_val * (1 - f) + to_val * f + + if all([isinstance(number, bool) for number in [from_val, to_val]]): + return from_val if f < 0.5 else to_val + + +if __name__ == "__main__": + pass diff --git a/build/lib/twikit/x_client_transaction/rotation.py b/build/lib/twikit/x_client_transaction/rotation.py new file mode 100644 index 00000000..c27f5f4f --- /dev/null +++ b/build/lib/twikit/x_client_transaction/rotation.py @@ -0,0 +1,27 @@ +import math +from typing import Union + + +def convert_rotation_to_matrix(rotation: Union[float, int]): + rad = math.radians(rotation) + return [math.cos(rad), -math.sin(rad), math.sin(rad), math.cos(rad)] + + +def convertRotationToMatrix(degrees: Union[float, int]): + # first convert degrees to radians + radians = degrees * math.pi / 180 + # now we do this: + """ + [cos(r), -sin(r), 0] + [sin(r), cos(r), 0] + + in this order: + [cos(r), sin(r), -sin(r), cos(r), 0, 0] + """ + cos = math.cos(radians) + sin = math.sin(radians) + return [cos, sin, -sin, cos, 0, 0] + + +if __name__ == 
"__main__": + pass diff --git a/build/lib/twikit/x_client_transaction/transaction.py b/build/lib/twikit/x_client_transaction/transaction.py new file mode 100644 index 00000000..523dc7c0 --- /dev/null +++ b/build/lib/twikit/x_client_transaction/transaction.py @@ -0,0 +1,164 @@ +import re +import bs4 +import math +import time +import random +import base64 +import hashlib +import requests +from typing import Union, List +from functools import reduce +from .cubic_curve import Cubic +from .interpolate import interpolate +from .rotation import convert_rotation_to_matrix +from .utils import float_to_hex, is_odd, base64_encode, handle_x_migration + +ON_DEMAND_FILE_REGEX = re.compile( + r"""['|\"]{1}ondemand\.s['|\"]{1}:\s*['|\"]{1}([\w]*)['|\"]{1}""", flags=(re.VERBOSE | re.MULTILINE)) +INDICES_REGEX = re.compile( + r"""(\(\w{1}\[(\d{1,2})\],\s*16\))+""", flags=(re.VERBOSE | re.MULTILINE)) + + +class ClientTransaction: + ADDITIONAL_RANDOM_NUMBER = 3 + DEFAULT_KEYWORD = "obfiowerehiring" + DEFAULT_ROW_INDEX = None + DEFAULT_KEY_BYTES_INDICES = None + + def __init__(self): + self.home_page_response = None + + async def init(self, session, headers): + home_page_response = await handle_x_migration(session, headers) + + self.home_page_response = self.validate_response(home_page_response) + self.DEFAULT_ROW_INDEX, self.DEFAULT_KEY_BYTES_INDICES = await self.get_indices( + self.home_page_response, session, headers) + self.key = self.get_key(response=self.home_page_response) + self.key_bytes = self.get_key_bytes(key=self.key) + self.animation_key = self.get_animation_key( + key_bytes=self.key_bytes, response=self.home_page_response) + + async def get_indices(self, home_page_response, session, headers): + key_byte_indices = [] + response = self.validate_response( + home_page_response) or self.home_page_response + on_demand_file = ON_DEMAND_FILE_REGEX.search(str(response)) + if on_demand_file: + on_demand_file_url = 
f"https://abs.twimg.com/responsive-web/client-web/ondemand.s.{on_demand_file.group(1)}a.js" + on_demand_file_response = await session.request(method="GET", url=on_demand_file_url, headers=headers) + key_byte_indices_match = INDICES_REGEX.finditer( + str(on_demand_file_response.text)) + for item in key_byte_indices_match: + key_byte_indices.append(item.group(2)) + if not key_byte_indices: + raise Exception("Couldn't get KEY_BYTE indices") + key_byte_indices = list(map(int, key_byte_indices)) + return key_byte_indices[0], key_byte_indices[1:] + + def validate_response(self, response: Union[bs4.BeautifulSoup, requests.models.Response]): + if not isinstance(response, (bs4.BeautifulSoup, requests.models.Response)): + raise Exception("invalid response") + return response if isinstance(response, bs4.BeautifulSoup) else bs4.BeautifulSoup(response.content, 'lxml') + + def get_key(self, response=None): + response = self.validate_response(response) or self.home_page_response + # + element = response.select_one("[name='twitter-site-verification']") + if not element: + raise Exception("Couldn't get key from the page source") + return element.get("content") + + def get_key_bytes(self, key: str): + return list(base64.b64decode(bytes(key, 'utf-8'))) + + def get_frames(self, response=None): + # loading-x-anim-0...loading-x-anim-3 + response = self.validate_response(response) or self.home_page_response + return response.select("[id^='loading-x-anim']") + + def get_2d_array(self, key_bytes: List[Union[float, int]], response, frames: bs4.ResultSet = None): + if not frames: + frames = self.get_frames(response) + # return list(list(frames[key[5] % 4].children)[0].children)[1].get("d")[9:].split("C") + return [[int(x) for x in re.sub(r"[^\d]+", " ", item).strip().split()] for item in list(list(frames[key_bytes[5] % 4].children)[0].children)[1].get("d")[9:].split("C")] + + def solve(self, value, min_val, max_val, rounding: bool): + result = value * (max_val-min_val) / 255 + min_val + 
return math.floor(result) if rounding else round(result, 2) + + def animate(self, frames, target_time): + # from_color = f"#{''.join(['{:x}'.format(digit) for digit in frames[:3]])}" + # to_color = f"#{''.join(['{:x}'.format(digit) for digit in frames[3:6]])}" + # from_rotation = "rotate(0deg)" + # to_rotation = f"rotate({solve(frames[6], 60, 360, True)}deg)" + # easing_values = [solve(value, -1 if count % 2 else 0, 1, False) + # for count, value in enumerate(frames[7:])] + # easing = f"cubic-bezier({','.join([str(value) for value in easing_values])})" + # current_time = round(target_time / 10) * 10 + + from_color = [float(item) for item in [*frames[:3], 1]] + to_color = [float(item) for item in [*frames[3:6], 1]] + from_rotation = [0.0] + to_rotation = [self.solve(float(frames[6]), 60.0, 360.0, True)] + frames = frames[7:] + curves = [self.solve(float(item), is_odd(counter), 1.0, False) + for counter, item in enumerate(frames)] + cubic = Cubic(curves) + val = cubic.get_value(target_time) + color = interpolate(from_color, to_color, val) + color = [value if value > 0 else 0 for value in color] + rotation = interpolate(from_rotation, to_rotation, val) + matrix = convert_rotation_to_matrix(rotation[0]) + # str_arr = [format(int(round(color[i])), '02x') for i in range(len(color) - 1)] + # str_arr = [format(int(round(color[i])), 'x') for i in range(len(color) - 1)] + str_arr = [format(round(value), 'x') for value in color[:-1]] + for value in matrix: + rounded = round(value, 2) + if rounded < 0: + rounded = -rounded + hex_value = float_to_hex(rounded) + str_arr.append(f"0{hex_value}".lower() if hex_value.startswith( + ".") else hex_value if hex_value else '0') + str_arr.extend(["0", "0"]) + animation_key = re.sub(r"[.-]", "", "".join(str_arr)) + return animation_key + + def get_animation_key(self, key_bytes, response): + total_time = 4096 + # row_index, frame_time = [key_bytes[2] % 16, key_bytes[12] % 16 * (key_bytes[14] % 16) * (key_bytes[7] % 16)] + # row_index, 
frame_time = [key_bytes[2] % 16, key_bytes[2] % 16 * (key_bytes[42] % 16) * (key_bytes[45] % 16)] + + row_index = key_bytes[self.DEFAULT_ROW_INDEX] % 16 + frame_time = reduce(lambda num1, num2: num1*num2, + [key_bytes[index] % 16 for index in self.DEFAULT_KEY_BYTES_INDICES]) + arr = self.get_2d_array(key_bytes, response) + frame_row = arr[row_index] + + target_time = float(frame_time) / total_time + animation_key = self.animate(frame_row, target_time) + return animation_key + + def generate_transaction_id(self, method: str, path: str, response=None, key=None, animation_key=None, time_now=None): + time_now = time_now or math.floor( + (time.time() * 1000 - 1682924400 * 1000) / 1000) + time_now_bytes = [(time_now >> (i * 8)) & 0xFF for i in range(4)] + key = key or self.key or self.get_key(response) + key_bytes = self.get_key_bytes(key) + animation_key = animation_key or self.animation_key or self.get_animation_key( + key_bytes, response) + # hash_val = hashlib.sha256(f"{method}!{path}!{time_now}bird{animation_key}".encode()).digest() + hash_val = hashlib.sha256( + f"{method}!{path}!{time_now}{self.DEFAULT_KEYWORD}{animation_key}".encode()).digest() + # hash_bytes = [int(hash_val[i]) for i in range(len(hash_val))] + hash_bytes = list(hash_val) + random_num = random.randint(0, 255) + bytes_arr = [*key_bytes, *time_now_bytes, * + hash_bytes[:16], self.ADDITIONAL_RANDOM_NUMBER] + out = bytearray( + [random_num, *[item ^ random_num for item in bytes_arr]]) + return base64_encode(out).strip("=") + + +if __name__ == "__main__": + pass diff --git a/build/lib/twikit/x_client_transaction/utils.py b/build/lib/twikit/x_client_transaction/utils.py new file mode 100644 index 00000000..9fe0131e --- /dev/null +++ b/build/lib/twikit/x_client_transaction/utils.py @@ -0,0 +1,84 @@ +import re +import bs4 +import base64 +from typing import Union + + +async def handle_x_migration(session, headers): + home_page = None + migration_redirection_regex = re.compile( + 
r"""(http(?:s)?://(?:www\.)?(twitter|x){1}\.com(/x)?/migrate([/?])?tok=[a-zA-Z0-9%\-_]+)+""", re.VERBOSE) + response = await session.request(method="GET", url="https://x.com", headers=headers) + home_page = bs4.BeautifulSoup(response.content, 'lxml') + migration_url = home_page.select_one("meta[http-equiv='refresh']") + migration_redirection_url = re.search(migration_redirection_regex, str( + migration_url)) or re.search(migration_redirection_regex, str(response.content)) + if migration_redirection_url: + response = await session.request(method="GET", url=migration_redirection_url.group(0), headers=headers) + home_page = bs4.BeautifulSoup(response.content, 'lxml') + migration_form = home_page.select_one("form[name='f']") or home_page.select_one(f"form[action='https://x.com/x/migrate']") + if migration_form: + url = migration_form.attrs.get("action", "https://x.com/x/migrate") + "/?mx=2" + method = migration_form.attrs.get("method", "POST") + request_payload = {input_field.get("name"): input_field.get("value") for input_field in migration_form.select("input")} + response = await session.request(method=method, url=url, data=request_payload, headers=headers) + home_page = bs4.BeautifulSoup(response.content, 'lxml') + return home_page + + +def float_to_hex(x): + result = [] + quotient = int(x) + fraction = x - quotient + + while quotient > 0: + quotient = int(x / 16) + remainder = int(x - (float(quotient) * 16)) + + if remainder > 9: + result.insert(0, chr(remainder + 55)) + else: + result.insert(0, str(remainder)) + + x = float(quotient) + + if fraction == 0: + return ''.join(result) + + result.append('.') + + while fraction > 0: + fraction *= 16 + integer = int(fraction) + fraction -= float(integer) + + if integer > 9: + result.append(chr(integer + 55)) + else: + result.append(str(integer)) + + return ''.join(result) + + +def is_odd(num: Union[int, float]): + if num % 2: + return -1.0 + return 0.0 + + +def base64_encode(string): + string = string.encode() if 
isinstance(string, str) else string + return base64.b64encode(string).decode() + + +def base64_decode(input): + try: + data = base64.b64decode(input) + return data.decode() + except Exception: + # return bytes(input, "utf-8") + return list(bytes(input, "utf-8")) + + +if __name__ == "__main__": + pass diff --git a/example_project/test_twikit.py b/example_project/test_twikit.py new file mode 100644 index 00000000..ec644225 --- /dev/null +++ b/example_project/test_twikit.py @@ -0,0 +1,18 @@ +import asyncio +from twikit import Client + +async def main(): + client = Client() + + # Load your saved cookies if you have them + client.load_cookies('path/to/cookies.json') + + # Or login directly + # await client.login(auth_info_1='...', auth_info_2='...', password='...') + + # Get a tweet and check its bookmark count + tweet = await client.get_tweet_by_id('1519480761749016577') + print(f"Tweet bookmark count: {tweet.bookmark_count}") + +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/examples/delete_all_tweets.py b/examples/delete_all_tweets.py deleted file mode 100644 index 0a169b81..00000000 --- a/examples/delete_all_tweets.py +++ /dev/null @@ -1,40 +0,0 @@ -import asyncio -import time - -from twikit import Client - -AUTH_INFO_1 = '...' -AUTH_INFO_2 = '...' -PASSWORD = '...' 
- -client = Client('en-US') - - -async def main(): - started_time = time.time() - - client.load_cookies('cookies.json') - client_user = await client.user() - - # Get all posts - all_tweets = [] - tweets = await client_user.get_tweets('Replies') - all_tweets += tweets - - while len(tweets) != 0: - tweets = await tweets.next() - all_tweets += tweets - - tasks = [] - for tweet in all_tweets: - tasks.append(tweet.delete()) - - gather = asyncio.gather(*tasks) - await gather - - print( - f'Deleted {len(all_tweets)} tweets\n' - f'Time: {time.time() - started_time}' - ) - -asyncio.run(main()) diff --git a/examples/dm_auto_reply.py b/examples/dm_auto_reply.py deleted file mode 100644 index 3ae60e15..00000000 --- a/examples/dm_auto_reply.py +++ /dev/null @@ -1,41 +0,0 @@ -import asyncio -import os - -from twikit import Client -from twikit.streaming import Topic - -AUTH_INFO_1 = '' -AUTH_INFO_2 = '' -PASSWORD = '' - -client = Client() - - -async def main(): - if os.path.exists('cookies.json'): - client.load_cookies('cookies.json') - else: - await client.login( - auth_info_1=AUTH_INFO_1, - auth_info_2=AUTH_INFO_2, - password=PASSWORD - ) - client.save_cookies('cookies.json') - - - user_id = '1752362966203469824' # User ID of the DM partner to stream. - reply_message = 'Hello' - - topics = { - Topic.dm_update(f'{await client.user_id()}-{user_id}') - } - streaming_session = await client.get_streaming_session(topics) - - async for topic, payload in streaming_session: - if payload.dm_update: - if await client.user_id() == payload.dm_update.user_id: - continue - await client.send_dm(payload.dm_update.user_id, reply_message) - -asyncio.run(main()) - diff --git a/examples/download_tweet_media.py b/examples/download_tweet_media.py deleted file mode 100644 index 556449c5..00000000 --- a/examples/download_tweet_media.py +++ /dev/null @@ -1,23 +0,0 @@ -import asyncio -from twikit import Client - -AUTH_INFO_1 = '...' -AUTH_INFO_2 = '...' -PASSWORD = '...' 
- -client = Client('en-US') - - -async def main(): - tweet = await client.get_tweet_by_id('...') - - for i, media in enumerate(tweet.media): - media_url = media.get('media_url_https') - extension = media_url.rsplit('.', 1)[-1] - - response = await client.get(media_url, headers=client._base_headers) - - with open(f'media_{i}.{extension}', 'wb') as f: - f.write(response.content) - -asyncio.run(main()) diff --git a/examples/example.py b/examples/example.py deleted file mode 100644 index ddf0af14..00000000 --- a/examples/example.py +++ /dev/null @@ -1,137 +0,0 @@ -import asyncio - -from twikit import Client - -########################################### - -# Enter your account information -USERNAME = ... -EMAIL = ... -PASSWORD = ... - -client = Client('en-US') - -async def main(): - # Asynchronous client methods are coroutines and - # must be called using `await`. - await client.login( - auth_info_1=USERNAME, - auth_info_2=EMAIL, - password=PASSWORD - ) - - ########################################### - - # Search Latest Tweets - tweets = await client.search_tweet('query', 'Latest') - for tweet in tweets: - print(tweet) - # Search more tweets - more_tweets = await tweets.next() - - ########################################### - - # Search users - users = await client.search_user('query') - for user in users: - print(user) - # Search more users - more_users = await users.next() - - ########################################### - - # Get user by screen name - USER_SCREEN_NAME = 'example_user' - user = await client.get_user_by_screen_name(USER_SCREEN_NAME) - - # Access user attributes - print( - f'id: {user.id}', - f'name: {user.name}', - f'followers: {user.followers_count}', - f'tweets count: {user.statuses_count}', - sep='\n' - ) - - # Follow user - await user.follow() - # Unfollow user - await user.unfollow() - - # Get user tweets - user_tweets = await user.get_tweets('Tweets') - for tweet in user_tweets: - print(tweet) - # Get more tweets - more_user_tweets = await 
user_tweets.next() - - ########################################### - - # Send dm to a user - media_id = await client.upload_media('./image.png', 0) - await user.send_dm('dm text', media_id) - - # Get dm history - messages = await user.get_dm_history() - for message in messages: - print(message) - # Get more messages - more_messages = await messages.next() - - ########################################### - - # Get tweet by ID - TWEET_ID = '0000000000' - tweet = await client.get_tweet_by_id(TWEET_ID) - - # Access tweet attributes - print( - f'id: {tweet.id}', - f'text {tweet.text}', - f'favorite count: {tweet.favorite_count}', - f'media: {tweet.media}', - sep='\n' - ) - - # Favorite tweet - await tweet.favorite() - # Unfavorite tweet - await tweet.unfavorite() - # Retweet tweet - await tweet.retweet() - # Delete retweet - await tweet.delete_retweet() - - # Reply to tweet - await tweet.reply('tweet content') - - ########################################### - - # Create tweet with media - TWEET_TEXT = 'tweet text' - MEDIA_IDS = [ - await client.upload_media('./media1.png', 0), - await client.upload_media('./media2.png', 1), - await client.upload_media('./media3.png', 2) - ] - - client.create_tweet(TWEET_TEXT, MEDIA_IDS) - - # Create tweet with a poll - TWEET_TEXT = 'tweet text' - POLL_URI = await client.create_poll( - ['Option 1', 'Option 2', 'Option 3'] - ) - - await client.create_tweet(TWEET_TEXT, poll_uri=POLL_URI) - - ########################################### - - # Get news trends - trends = await client.get_trends('news') - for trend in trends: - print(trend) - - ########################################### - -asyncio.run(main()) diff --git a/examples/guest.py b/examples/guest.py deleted file mode 100644 index 4c48444c..00000000 --- a/examples/guest.py +++ /dev/null @@ -1,26 +0,0 @@ -import asyncio - -from twikit.guest import GuestClient - -client = GuestClient() - - -async def main(): - # Activate the client by generating a guest token. 
- await client.activate() - - # Get user by screen name - user = await client.get_user_by_screen_name('elonmusk') - print(user) - # Get user by ID - user = await client.get_user_by_id('44196397') - print(user) - - - user_tweets = await client.get_user_tweets('44196397') - print(user_tweets) - - tweet = await client.get_tweet_by_id('1519480761749016577') - print(tweet) - -asyncio.run(main()) diff --git a/examples/listen_for_new_tweets.py b/examples/listen_for_new_tweets.py deleted file mode 100644 index ec2a6619..00000000 --- a/examples/listen_for_new_tweets.py +++ /dev/null @@ -1,37 +0,0 @@ -import asyncio -from typing import NoReturn - -from twikit import Client, Tweet - -AUTH_INFO_1 = '...' -AUTH_INFO_2 = '...' -PASSWORD = '...' - -client = Client() - -USER_ID = '44196397' -CHECK_INTERVAL = 60 * 5 - - -def callback(tweet: Tweet) -> None: - print(f'New tweet posted : {tweet.text}') - - -async def get_latest_tweet() -> Tweet: - return await client.get_user_tweets(USER_ID, 'Replies')[0] - - -async def main() -> NoReturn: - before_tweet = await get_latest_tweet() - - while True: - await asyncio.sleep(CHECK_INTERVAL) - latest_tweet = await get_latest_tweet() - if ( - before_tweet != latest_tweet and - before_tweet.created_at_datetime < latest_tweet.created_at_datetime - ): - callable(latest_tweet) - before_tweet = latest_tweet - -asyncio.run(main()) diff --git a/lib/python3.11/site-packages/_distutils_hack/__init__.py b/lib/python3.11/site-packages/_distutils_hack/__init__.py new file mode 100644 index 00000000..94f71b99 --- /dev/null +++ b/lib/python3.11/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,239 @@ +# don't import any costly modules +import os +import sys + +report_url = ( + "https://github.com/pypa/setuptools/issues/new?template=distutils-deprecation.yml" +) + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + import warnings + + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + 
"also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils." + ) + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + import warnings + + warnings.warn( + "Setuptools is replacing distutils. Support for replacing " + "an already imported distutils is deprecated. In the future, " + "this condition will fail. " + f"Register concerns at {report_url}" + ) + mods = [ + name + for name in sys.modules + if name == "distutils" or name.startswith("distutils.") + ] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') + if which == 'stdlib': + import warnings + + warnings.warn( + "Reliance on distutils from stdlib is deprecated. Users " + "must rely on setuptools to provide the distutils module. " + "Avoid importing distutils or import setuptools first, " + "and avoid setting SETUPTOOLS_USE_DISTUTILS=stdlib. " + f"Register concerns at {report_url}" + ) + return which == 'local' + + +def ensure_local_distutils(): + import importlib + + clear_distutils() + + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. + with shim(): + importlib.import_module('distutils') + + # check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + assert 'setuptools._distutils.log' not in sys.modules + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. 
+ + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. + """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class _TrivialRe: + def __init__(self, *patterns) -> None: + self._patterns = patterns + + def match(self, string): + return all(pat in string for pat in self._patterns) + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + # optimization: only consider top level modules and those + # found in the CPython test suite. + if path is not None and not fullname.startswith('test.'): + return None + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + if self.is_cpython(): + return None + + import importlib + import importlib.abc + import importlib.util + + try: + mod = importlib.import_module('setuptools._distutils') + except Exception: + # There are a couple of cases where setuptools._distutils + # may not be present: + # - An older Setuptools without a local distutils is + # taking precedence. Ref #2957. + # - Path manipulation during sitecustomize removes + # setuptools from the path but only after the hook + # has been loaded. Ref #2980. + # In either case, fall back to stdlib behavior. + return None + + class DistutilsLoader(importlib.abc.Loader): + def create_module(self, spec): + mod.__name__ = 'distutils' + return mod + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader( + 'distutils', DistutilsLoader(), origin=mod.__file__ + ) + + @staticmethod + def is_cpython(): + """ + Suppress supplying distutils for CPython (build and tests). + Ref #2965 and #3007. + """ + return os.path.isfile('pybuilddir.txt') + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. 
+ """ + if sys.version_info >= (3, 12) or self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @classmethod + def pip_imported_during_build(cls): + """ + Detect if pip is being imported in a build script. Ref #2355. + """ + import traceback + + return any( + cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) + ) + + @staticmethod + def frame_file_is_setup(frame): + """ + Return True if the indicated frame suggests a setup.py file. + """ + # some frames may not have __file__ (#2940) + return frame.f_globals.get('__file__', '').endswith('setup.py') + + def spec_for_sensitive_tests(self): + """ + Ensure stdlib distutils when running select tests under CPython. + + python/cpython#91169 + """ + clear_distutils() + self.spec_for_distutils = lambda: None + + sensitive_tests = ( + [ + 'test.test_distutils', + 'test.test_peg_generator', + 'test.test_importlib', + ] + if sys.version_info < (3, 10) + else [ + 'test.test_distutils', + ] + ) + + +for name in DistutilsMetaFinder.sensitive_tests: + setattr( + DistutilsMetaFinder, + f'spec_for_{name}', + DistutilsMetaFinder.spec_for_sensitive_tests, + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + DISTUTILS_FINDER in sys.meta_path or insert_shim() + + +class shim: + def __enter__(self) -> None: + insert_shim() + + def __exit__(self, exc: object, value: object, tb: object) -> None: + _remove_shim() + + +def insert_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def _remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass + + +if sys.version_info < (3, 12): + # DistutilsMetaFinder can only be disabled in Python < 3.12 (PEP 632) + remove_shim = _remove_shim diff --git a/lib/python3.11/site-packages/_distutils_hack/override.py b/lib/python3.11/site-packages/_distutils_hack/override.py new file mode 100644 index 00000000..2cc433a4 --- /dev/null +++ 
b/lib/python3.11/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/lib/python3.11/site-packages/_virtualenv.pth b/lib/python3.11/site-packages/_virtualenv.pth new file mode 100644 index 00000000..1c3ff998 --- /dev/null +++ b/lib/python3.11/site-packages/_virtualenv.pth @@ -0,0 +1 @@ +import _virtualenv \ No newline at end of file diff --git a/lib/python3.11/site-packages/_virtualenv.py b/lib/python3.11/site-packages/_virtualenv.py new file mode 100644 index 00000000..b61db307 --- /dev/null +++ b/lib/python3.11/site-packages/_virtualenv.py @@ -0,0 +1,103 @@ +"""Patches that are applied at runtime to the virtual environment.""" + +from __future__ import annotations + +import os +import sys + +VIRTUALENV_PATCH_FILE = os.path.join(__file__) + + +def patch_dist(dist): + """ + Distutils allows user to configure some arguments via a configuration file: + https://docs.python.org/3/install/index.html#distutils-configuration-files. + + Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up. 
+ """ # noqa: D205 + # we cannot allow some install config as that would get packages installed outside of the virtual environment + old_parse_config_files = dist.Distribution.parse_config_files + + def parse_config_files(self, *args, **kwargs): + result = old_parse_config_files(self, *args, **kwargs) + install = self.get_option_dict("install") + + if "prefix" in install: # the prefix governs where to install the libraries + install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix) + for base in ("purelib", "platlib", "headers", "scripts", "data"): + key = f"install_{base}" + if key in install: # do not allow global configs to hijack venv paths + install.pop(key, None) + return result + + dist.Distribution.parse_config_files = parse_config_files + + +# Import hook that patches some modules to ignore configuration values that break package installation in case +# of virtual environments. +_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist" +# https://docs.python.org/3/library/importlib.html#setting-up-an-importer + + +class _Finder: + """A meta path finder that allows patching the imported distutils modules.""" + + fullname = None + + # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup, + # because there are gevent-based applications that need to be first to import threading by themselves. + # See https://github.com/pypa/virtualenv/issues/1895 for details. + lock = [] # noqa: RUF012 + + def find_spec(self, fullname, path, target=None): # noqa: ARG002 + if fullname in _DISTUTILS_PATCH and self.fullname is None: # noqa: PLR1702 + # initialize lock[0] lazily + if len(self.lock) == 0: + import threading # noqa: PLC0415 + + lock = threading.Lock() + # there is possibility that two threads T1 and T2 are simultaneously running into find_spec, + # observing .lock as empty, and further going into hereby initialization. 
However due to the GIL, + # list.append() operation is atomic and this way only one of the threads will "win" to put the lock + # - that every thread will use - into .lock[0]. + # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe + self.lock.append(lock) + + from functools import partial # noqa: PLC0415 + from importlib.util import find_spec # noqa: PLC0415 + + with self.lock[0]: + self.fullname = fullname + try: + spec = find_spec(fullname, path) + if spec is not None: + # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work + is_new_api = hasattr(spec.loader, "exec_module") + func_name = "exec_module" if is_new_api else "load_module" + old = getattr(spec.loader, func_name) + func = self.exec_module if is_new_api else self.load_module + if old is not func: + try: # noqa: SIM105 + setattr(spec.loader, func_name, partial(func, old)) + except AttributeError: + pass # C-Extension loaders are r/o such as zipimporter with <3.7 + return spec + finally: + self.fullname = None + return None + + @staticmethod + def exec_module(old, module): + old(module) + if module.__name__ in _DISTUTILS_PATCH: + patch_dist(module) + + @staticmethod + def load_module(old, name): + module = old(name) + if module.__name__ in _DISTUTILS_PATCH: + patch_dist(module) + return module + + +sys.meta_path.insert(0, _Finder()) diff --git a/lib/python3.11/site-packages/anyio-4.8.0.dist-info/INSTALLER b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.11/site-packages/anyio-4.8.0.dist-info/LICENSE b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/LICENSE new file mode 100644 index 00000000..104eebf5 --- /dev/null +++ b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission 
is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/lib/python3.11/site-packages/anyio-4.8.0.dist-info/METADATA b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/METADATA new file mode 100644 index 00000000..7b114cdb --- /dev/null +++ b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/METADATA @@ -0,0 +1,104 @@ +Metadata-Version: 2.1 +Name: anyio +Version: 4.8.0 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Author-email: Alex Grönholm +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11" +Requires-Dist: idna>=2.8 +Requires-Dist: sniffio>=1.1 +Requires-Dist: typing_extensions>=4.5; python_version < "3.13" +Provides-Extra: trio +Requires-Dist: trio>=0.26.1; extra == "trio" +Provides-Extra: test +Requires-Dist: anyio[trio]; extra == "test" +Requires-Dist: coverage[toml]>=7; extra == "test" +Requires-Dist: exceptiongroup>=1.2.0; extra == "test" +Requires-Dist: hypothesis>=4.0; extra == "test" +Requires-Dist: psutil>=5.9; extra == "test" +Requires-Dist: pytest>=7.0; extra == "test" +Requires-Dist: trustme; extra 
== "test" +Requires-Dist: truststore>=0.9.1; python_version >= "3.10" and extra == "test" +Requires-Dist: uvloop>=0.21; (platform_python_implementation == "CPython" and platform_system != "Windows" and python_version < "3.14") and extra == "test" +Provides-Extra: doc +Requires-Dist: packaging; extra == "doc" +Requires-Dist: Sphinx~=7.4; extra == "doc" +Requires-Dist: sphinx_rtd_theme; extra == "doc" +Requires-Dist: sphinx-autodoc-typehints>=1.2.0; extra == "doc" + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with the native libraries of your chosen backend. 
+ +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High-level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. 
_Hypothesis: https://hypothesis.works/ diff --git a/lib/python3.11/site-packages/anyio-4.8.0.dist-info/RECORD b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/RECORD new file mode 100644 index 00000000..d1481599 --- /dev/null +++ b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/RECORD @@ -0,0 +1,86 @@ +anyio-4.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-4.8.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-4.8.0.dist-info/METADATA,sha256=WjTz5zz2NgMStBtw4xDh8CDvf6YXgAOrA0nboFQkXEg,4630 +anyio-4.8.0.dist-info/RECORD,, +anyio-4.8.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91 +anyio-4.8.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-4.8.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=mVsWuQ6wxcPT9QUAxhz1Rg2u53PskaBJw4TXVXk63ZQ,4513 +anyio/__pycache__/__init__.cpython-311.pyc,, +anyio/__pycache__/from_thread.cpython-311.pyc,, +anyio/__pycache__/lowlevel.cpython-311.pyc,, +anyio/__pycache__/pytest_plugin.cpython-311.pyc,, +anyio/__pycache__/to_interpreter.cpython-311.pyc,, +anyio/__pycache__/to_process.cpython-311.pyc,, +anyio/__pycache__/to_thread.cpython-311.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-311.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-311.pyc,, +anyio/_backends/__pycache__/_trio.cpython-311.pyc,, +anyio/_backends/_asyncio.py,sha256=_6BDFDrEPI1aRPFDSPsUzyPGm-dnRgBPfEOdeCH1Ixg,92951 +anyio/_backends/_trio.py,sha256=7PNO_GPq8Dmo1kQmmE2z57dhnv4TUoinsEHt_PJg3oE,40405 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-311.pyc,, +anyio/_core/__pycache__/_asyncio_selector_thread.cpython-311.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-311.pyc,, 
+anyio/_core/__pycache__/_exceptions.cpython-311.pyc,, +anyio/_core/__pycache__/_fileio.cpython-311.pyc,, +anyio/_core/__pycache__/_resources.cpython-311.pyc,, +anyio/_core/__pycache__/_signals.cpython-311.pyc,, +anyio/_core/__pycache__/_sockets.cpython-311.pyc,, +anyio/_core/__pycache__/_streams.cpython-311.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-311.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-311.pyc,, +anyio/_core/__pycache__/_tasks.cpython-311.pyc,, +anyio/_core/__pycache__/_testing.cpython-311.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-311.pyc,, +anyio/_core/_asyncio_selector_thread.py,sha256=53RhMHpFAexW0dQz2Rn8iy8zt931NXyEJITyILWVV_A,5626 +anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695 +anyio/_core/_exceptions.py,sha256=RlPRlwastdmfDPoskdXNO6SI8_l3fclA2wtW6cokU9I,3503 +anyio/_core/_fileio.py,sha256=r6QJmwn90vU0CyCDAWgGhCwc8cT26ofosaHl7Jo3LJU,22853 +anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 +anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905 +anyio/_core/_sockets.py,sha256=vQ5GnSDLHjEhHhV2yvsdiPs5wmPxxb1kRsv3RM5lbQk,26951 +anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804 +anyio/_core/_subprocesses.py,sha256=WquR6sHrnaZofaeqnL8U4Yv___msVW_WqivleLHK4zI,7760 +anyio/_core/_synchronization.py,sha256=DwUh8Tl6cG_UMVC_GyzPoC_U9BpfDfjMl9SINSxcZN4,20320 +anyio/_core/_tasks.py,sha256=f3CuWwo06cCZ6jaOv-JHFKWkgpgf2cvaF25Oh4augMA,4757 +anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118 +anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508 +anyio/abc/__init__.py,sha256=c2OQbTCS_fQowviMXanLPh8m29ccwkXmpDr7uyNZYOo,2652 +anyio/abc/__pycache__/__init__.cpython-311.pyc,, +anyio/abc/__pycache__/_eventloop.cpython-311.pyc,, +anyio/abc/__pycache__/_resources.cpython-311.pyc,, +anyio/abc/__pycache__/_sockets.cpython-311.pyc,, 
+anyio/abc/__pycache__/_streams.cpython-311.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-311.pyc,, +anyio/abc/__pycache__/_tasks.cpython-311.pyc,, +anyio/abc/__pycache__/_testing.cpython-311.pyc,, +anyio/abc/_eventloop.py,sha256=Wd_3C3hLm0ex5z_eHHWGqvLle2OKCSexJSZVnwQNGV4,9658 +anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783 +anyio/abc/_sockets.py,sha256=KhWtJxan8jpBXKwPaFeQzI4iRXdFaOIn0HXtDZnaO7U,6262 +anyio/abc/_streams.py,sha256=GzST5Q2zQmxVzdrAqtbSyHNxkPlIC9AzeZJg_YyPAXw,6598 +anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 +anyio/abc/_tasks.py,sha256=yJWbMwowvqjlAX4oJ3l9Is1w-zwynr2lX1Z02AWJqsY,3080 +anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821 +anyio/from_thread.py,sha256=dbi5TUH45_Sg_jZ8Vv1NJWVohe0WeQ_OaCvXIKveAGg,17478 +anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=vjGhGRHD31OyMgJRFQrMvExhx3Ea8KbyDqYKmiSDdXA,6712 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-311.pyc,, +anyio/streams/__pycache__/buffered.cpython-311.pyc,, +anyio/streams/__pycache__/file.cpython-311.pyc,, +anyio/streams/__pycache__/memory.cpython-311.pyc,, +anyio/streams/__pycache__/stapled.cpython-311.pyc,, +anyio/streams/__pycache__/text.cpython-311.pyc,, +anyio/streams/__pycache__/tls.cpython-311.pyc,, +anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500 +anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383 +anyio/streams/memory.py,sha256=j8AyOExK4-UPaon_Xbhwax25Vqs0DwFg3ZXc-EIiHjY,10550 +anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302 +anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094 +anyio/streams/tls.py,sha256=m3AE2LVSpoRHSIwSoSCupiOVL54EvOFoY3CcwTxcZfg,12742 
+anyio/to_interpreter.py,sha256=QhTFaSdyUjxpuN_wBJWPWyh8N6kKV4qhkn71Op84AEc,6624 +anyio/to_process.py,sha256=ZvruelRM-HNmqDaql4sdNODg2QD_uSlwSCxnV4OhsfQ,9595 +anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396 diff --git a/lib/python3.11/site-packages/anyio-4.8.0.dist-info/WHEEL b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/WHEEL new file mode 100644 index 00000000..ae527e7d --- /dev/null +++ b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.6.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/lib/python3.11/site-packages/anyio-4.8.0.dist-info/entry_points.txt b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/entry_points.txt new file mode 100644 index 00000000..44dd9bdc --- /dev/null +++ b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/lib/python3.11/site-packages/anyio-4.8.0.dist-info/top_level.txt b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/top_level.txt new file mode 100644 index 00000000..c77c069e --- /dev/null +++ b/lib/python3.11/site-packages/anyio-4.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/lib/python3.11/site-packages/anyio/__init__.py b/lib/python3.11/site-packages/anyio/__init__.py new file mode 100644 index 00000000..09831259 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/__init__.py @@ -0,0 +1,77 @@ +from __future__ import annotations + +from ._core._eventloop import current_time as current_time +from ._core._eventloop import get_all_backends as get_all_backends +from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class +from ._core._eventloop import run as run +from ._core._eventloop import sleep as sleep +from ._core._eventloop import sleep_forever as sleep_forever +from ._core._eventloop import sleep_until as sleep_until +from ._core._exceptions import BrokenResourceError as 
BrokenResourceError +from ._core._exceptions import BrokenWorkerIntepreter as BrokenWorkerIntepreter +from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess +from ._core._exceptions import BusyResourceError as BusyResourceError +from ._core._exceptions import ClosedResourceError as ClosedResourceError +from ._core._exceptions import DelimiterNotFound as DelimiterNotFound +from ._core._exceptions import EndOfStream as EndOfStream +from ._core._exceptions import IncompleteRead as IncompleteRead +from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError +from ._core._exceptions import WouldBlock as WouldBlock +from ._core._fileio import AsyncFile as AsyncFile +from ._core._fileio import Path as Path +from ._core._fileio import open_file as open_file +from ._core._fileio import wrap_file as wrap_file +from ._core._resources import aclose_forcefully as aclose_forcefully +from ._core._signals import open_signal_receiver as open_signal_receiver +from ._core._sockets import connect_tcp as connect_tcp +from ._core._sockets import connect_unix as connect_unix +from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket +from ._core._sockets import ( + create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, +) +from ._core._sockets import create_tcp_listener as create_tcp_listener +from ._core._sockets import create_udp_socket as create_udp_socket +from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket +from ._core._sockets import create_unix_listener as create_unix_listener +from ._core._sockets import getaddrinfo as getaddrinfo +from ._core._sockets import getnameinfo as getnameinfo +from ._core._sockets import wait_readable as wait_readable +from ._core._sockets import wait_socket_readable as wait_socket_readable +from ._core._sockets import wait_socket_writable as wait_socket_writable +from ._core._sockets import wait_writable as wait_writable 
+from ._core._streams import create_memory_object_stream as create_memory_object_stream +from ._core._subprocesses import open_process as open_process +from ._core._subprocesses import run_process as run_process +from ._core._synchronization import CapacityLimiter as CapacityLimiter +from ._core._synchronization import ( + CapacityLimiterStatistics as CapacityLimiterStatistics, +) +from ._core._synchronization import Condition as Condition +from ._core._synchronization import ConditionStatistics as ConditionStatistics +from ._core._synchronization import Event as Event +from ._core._synchronization import EventStatistics as EventStatistics +from ._core._synchronization import Lock as Lock +from ._core._synchronization import LockStatistics as LockStatistics +from ._core._synchronization import ResourceGuard as ResourceGuard +from ._core._synchronization import Semaphore as Semaphore +from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics +from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED +from ._core._tasks import CancelScope as CancelScope +from ._core._tasks import create_task_group as create_task_group +from ._core._tasks import current_effective_deadline as current_effective_deadline +from ._core._tasks import fail_after as fail_after +from ._core._tasks import move_on_after as move_on_after +from ._core._testing import TaskInfo as TaskInfo +from ._core._testing import get_current_task as get_current_task +from ._core._testing import get_running_tasks as get_running_tasks +from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider +from ._core._typedattr import TypedAttributeSet as TypedAttributeSet +from ._core._typedattr import typed_attribute as typed_attribute + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", 
"").startswith("anyio."): + __value.__module__ = __name__ + +del __value diff --git a/lib/python3.11/site-packages/anyio/_backends/__init__.py b/lib/python3.11/site-packages/anyio/_backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/python3.11/site-packages/anyio/_backends/_asyncio.py b/lib/python3.11/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 00000000..76a400c1 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,2807 @@ +from __future__ import annotations + +import array +import asyncio +import concurrent.futures +import math +import os +import socket +import sys +import threading +import weakref +from asyncio import ( + AbstractEventLoop, + CancelledError, + all_tasks, + create_task, + current_task, + get_running_loop, + sleep, +) +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from concurrent.futures import Future +from contextlib import AbstractContextManager, suppress +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + getcoroutinestate, + iscoroutine, +) +from io import IOBase +from os import PathLike +from queue import Queue +from signal import Signals +from socket import AddressFamily, SocketKind +from threading import Thread +from types import CodeType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Optional, + TypeVar, + cast, +) +from weakref import WeakKeyDictionary + +import sniffio + +from .. 
import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + TaskInfo, + abc, +) +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, + iterate_exceptions, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) +from ..abc._eventloop import StrOrBytesPath +from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory 
= loop_factory + self._loop: AbstractEventLoop | None = None + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + self.close() + + def close(self) -> None: + """Shutdown and close event loop.""" + if self._state is not _State.INITIALIZED: + return + try: + loop = self._loop + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. 
embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt() + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": 
task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() + + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + +_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, 
qualname) if x]) + + +# +# Event loop +# + +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + # The task coro should never be None here, as we never add finished tasks to the + # task list + coro = task.get_coro() + assert coro is not None + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None + + +# +# Timeouts and cancellation +# + + +def is_anyio_cancellation(exc: CancelledError) -> bool: + # Sometimes third party frameworks catch a CancelledError and raise a new one, so as + # a workaround we have to look at the previous ones in __context__ too for a + # matching cancel message + while True: + if ( + exc.args + and isinstance(exc.args[0], str) + and exc.args[0].startswith("Cancelled by cancel scope ") + ): + return True + + if isinstance(exc.__context__, CancelledError): + exc = exc.__context__ + continue + + return False + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() + self._cancel_called = False + self._cancelled_caught = False + self._active = False + self._timeout_handle: asyncio.TimerHandle | None = None + self._cancel_handle: asyncio.Handle | None = None + self._tasks: set[asyncio.Task] = set() + self._host_task: asyncio.Task | None = None + if sys.version_info >= (3, 11): + self._pending_uncancellations: int | None = 0 + else: + self._pending_uncancellations = None 
+ + def __enter__(self) -> CancelScope: + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_state = TaskState(None, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + if self._parent_scope is not None: + # If using an eager task factory, the parent scope may not even contain + # the host task + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.discard(host_task) + + self._timeout() + self._active = True + + # Start cancelling the host task if the scope was cancelled before entering + if self._cancel_called: + self._deliver_cancellation(self) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + del exc_tb + + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope" + ) + + try: + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the closest visible, cancelled 
parent + # scope if necessary + self._restart_cancellation_in_parent() + + # We only swallow the exception iff it was an AnyIO CancelledError, either + # directly as exc_val or inside an exception group and there are no cancelled + # parent cancel scopes visible to us here + if self._cancel_called and not self._parent_cancellation_is_visible_to_us: + # For each level-cancel() call made on the host task, call uncancel() + while self._pending_uncancellations: + self._host_task.uncancel() + self._pending_uncancellations -= 1 + + # Update cancelled_caught and check for exceptions we must not swallow + cannot_swallow_exc_val = False + if exc_val is not None: + for exc in iterate_exceptions(exc_val): + if isinstance(exc, CancelledError) and is_anyio_cancellation( + exc + ): + self._cancelled_caught = True + else: + cannot_swallow_exc_val = True + + return self._cancelled_caught and not cannot_swallow_exc_val + else: + if self._pending_uncancellations: + assert self._parent_scope is not None + assert self._parent_scope._pending_uncancellations is not None + self._parent_scope._pending_uncancellations += ( + self._pending_uncancellations + ) + self._pending_uncancellations = 0 + + return False + finally: + self._host_task = None + del exc_val + + @property + def _effectively_cancelled(self) -> bool: + cancel_scope: CancelScope | None = self + while cancel_scope is not None: + if cancel_scope._cancel_called: + return True + + if cancel_scope.shield: + return False + + cancel_scope = cancel_scope._parent_scope + + return False + + @property + def _parent_cancellation_is_visible_to_us(self) -> bool: + return ( + self._parent_scope is not None + and not self.shield + and self._parent_scope._effectively_cancelled + ) + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self, origin: 
CancelScope) -> bool: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for + cancellation. + + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + + """ + should_retry = False + current = current_task() + for task in self._tasks: + should_retry = True + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): + task.cancel(f"Cancelled by cancel scope {id(origin):x}") + if ( + task is origin._host_task + and origin._pending_uncancellations is not None + ): + origin._pending_uncancellations += 1 + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry + + # Schedule another callback if there are still tasks left + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None + + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. 
+ + """ + scope = self._parent_scope + while scope is not None: + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def cancel(self) -> None: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + if self._host_task is not None: + self._deliver_cancellation(self) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._restart_cancellation_in_parent() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. 
+ """ + + __slots__ = "parent_id", "cancel_scope", "__weakref__" + + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): + self.parent_id = parent_id + self.cancel_scope = cancel_scope + + +_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() + + +# +# Task groups +# + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: T_contra | None = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + if not self._future.cancelled(): + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +if sys.version_info >= (3, 12): + _eager_task_factory_code: CodeType | None = asyncio.eager_task_factory.__code__ +else: + _eager_task_factory_code = None + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() + self._on_completed_fut: asyncio.Future[None] | None = None + + async def __aenter__(self) -> TaskGroup: + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + try: + if exc_val is not None: + self.cancel_scope.cancel() + if not isinstance(exc_val, CancelledError): + self._exceptions.append(exc_val) + + loop = get_running_loop() + try: + if self._tasks: + with CancelScope() as wait_scope: + while self._tasks: + self._on_completed_fut = loop.create_future() + + try: + await self._on_completed_fut + except CancelledError as exc: + # Shield the scope against further cancellation attempts, + # as 
they're not productive (#695) + wait_scope.shield = True + self.cancel_scope.cancel() + + # Set exc_val from the cancellation exception if it was + # previously unset. However, we should not replace a native + # cancellation exception with one raise by a cancel scope. + if exc_val is None or ( + isinstance(exc_val, CancelledError) + and not is_anyio_cancellation(exc) + ): + exc_val = exc + + self._on_completed_fut = None + else: + # If there are no child tasks to wait on, run at least one checkpoint + # anyway + await AsyncIOBackend.cancel_shielded_checkpoint() + + self._active = False + if self._exceptions: + raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) + elif exc_val: + raise exc_val + except BaseException as exc: + if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__): + return True + + raise + + return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + finally: + del exc_val, exc_tb, self._exceptions + + def _spawn( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], + name: object, + task_status_future: asyncio.Future | None = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) + del _task_states[_task] + + if self._on_completed_fut is not None and not self._tasks: + try: + self._on_completed_fut.set_result(None) + except asyncio.InvalidStateError: + pass + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + # The future can only be in the cancelled state if the host task was + # cancelled, so return immediately instead of adding one more + # CancelledError to the exceptions list + if task_status_future is not None and 
task_status_future.cancelled(): + return + + if task_status_future is None or task_status_future.done(): + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._effectively_cancelled: + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not iscoroutine(coro): + prefix = f"{func.__module__}." if hasattr(func, "__module__") else "" + raise TypeError( + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" + ) + + name = get_callable_name(func) if name is None else str(name) + loop = asyncio.get_running_loop() + if ( + (factory := loop.get_task_factory()) + and getattr(factory, "__code__", None) is _eager_task_factory_code + and (closure := getattr(factory, "__closure__", None)) + ): + custom_task_constructor = closure[0].cell_contents + task = custom_task_constructor(coro, loop=loop, name=name) + else: + task = create_task(coro, name=name) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + self._tasks.add(task) + task.add_done_callback(task_done) + return task + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + self._spawn(func, 
args, name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. + try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise + + +# +# Threads +# + +_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: set[WorkerThread], + idle_workers: deque[WorkerThread], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None + ] = Queue(2) + self.idle_since = AsyncIOBackend.current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: BaseException | None + ) -> None: + self.idle_since = AsyncIOBackend.current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + if isinstance(exc, StopIteration): + new_exc = RuntimeError("coroutine raised StopIteration") + new_exc.__cause__ = exc + exc = new_exc + + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread(AsyncIOBackend, self.loop): + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, 
future, cancel_scope = item + if not future.cancelled(): + result = None + exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + finally: + del threadlocals.current_cancel_scope + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + self.queue.task_done() + + def stop(self, f: asyncio.Task | None = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( + "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + AsyncIOBackend.run_sync_from_thread( + partial(self._task_group.start_soon, name=name), + (self._call_func, func, args, kwargs, future), + self._loop, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.set_exception(ClosedResourceError()) + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> 
None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: StreamWriterWrapper | None + _stdout: StreamReaderWrapper | None + _stderr: StreamReaderWrapper | None + + async def aclose(self) -> None: + with CancelScope(shield=True) as scope: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + scope.shield = False + try: + await self.wait() + except BaseException: + scope.shield = True + self.kill() + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +def _forcibly_shutdown_process_pool_on_exit( + workers: set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: asyncio.AbstractChildWatcher | None = None + if sys.version_info < (3, 12): + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] 
+ process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). + + """ + process: abc.Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + is_at_eof: bool = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Exception | None) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + # ProactorEventloop sometimes sends bytearray instead of bytes + self.read_queue.append(bytes(data)) + self.read_event.set() + + def eof_received(self) -> bool | None: + self.is_at_eof = True + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: deque[tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = 
None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Exception | None) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + and not self._protocol.is_at_eof + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + else: + await AsyncIOBackend.checkpoint() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception from None + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will + # 
block until data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class _RawSocketMixin: + _receive_future: asyncio.Future | None = None + _send_future: asyncio.Future | None = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def aclose(self) -> None: 
+ if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self._raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self._raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if 
self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self._raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: CancelScope | None = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + 
await AsyncIOBackend.checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, client_sock + ) + return SocketStream(transport, protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await AsyncIOBackend.checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> 
socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + 
with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await 
self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +_read_events: RunVar[dict[int, asyncio.Event]] = RunVar("read_events") +_write_events: RunVar[dict[int, asyncio.Event]] = RunVar("write_events") + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self._owner_task: asyncio.Task | None = None + self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque() + + async def acquire(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._owner_task = task + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + 
self.release() + raise + + return + + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + fut: asyncio.Future[None] = asyncio.Future() + item = task, fut + self._waiters.append(item) + try: + await fut + except CancelledError: + self._waiters.remove(item) + if self._owner_task is task: + self.release() + + raise + + self._waiters.remove(item) + + def acquire_nowait(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + self._owner_task = task + return + + if self._owner_task is task: + raise RuntimeError("Attempted to acquire an already held Lock") + + raise WouldBlock + + def locked(self) -> bool: + return self._owner_task is not None + + def release(self) -> None: + if self._owner_task != current_task(): + raise RuntimeError("The current task is not holding this lock") + + for task, fut in self._waiters: + if not fut.cancelled(): + self._owner_task = task + fut.set_result(None) + return + + self._owner_task = None + + def statistics(self) -> LockStatistics: + task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None + return LockStatistics(self.locked(), task_info, len(self._waiters)) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ): + super().__init__(initial_value, max_value=max_value) + self._value = initial_value + self._max_value = max_value + self._fast_acquire = fast_acquire + self._waiters: deque[asyncio.Future[None]] = deque() + + async def acquire(self) -> None: + if self._value > 0 and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._value -= 1 + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too 
+ if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + fut: asyncio.Future[None] = asyncio.Future() + self._waiters.append(fut) + try: + await fut + except CancelledError: + try: + self._waiters.remove(fut) + except ValueError: + self.release() + + raise + + def acquire_nowait(self) -> None: + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> None: + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + for fut in self._waiters: + if not fut.cancelled(): + fut.set_result(None) + self._waiters.remove(fut) + return + + self._value += 1 + + @property + def value(self) -> int: + return self._value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> CapacityLimiter: + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: set[Any] = set() + self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + if value < 1: + raise ValueError("total_tokens must be >= 1") + + waiters_to_notify = max(value - self._total_tokens, 0) + self._total_tokens = value + + # Notify 
waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] + event.set() + waiters_to_notify -= 1 + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> None: + self.acquire_on_behalf_of_nowait(current_task()) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's " + "tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's tokens" + ) from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem(last=False)[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + 
self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +# +# Operating system signals +# + + +class _SignalReceiver: + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: deque[Signals] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: set[Signals] = set() + + def _deliver(self, signum: Signals) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> _SignalReceiver: + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +# +# Testing and debugging +# + + +class AsyncIOTaskInfo(TaskInfo): + def __init__(self, task: asyncio.Task): + task_state = _task_states.get(task) + if task_state is None: + parent_id = None + else: + parent_id = task_state.parent_id + + coro = task.get_coro() + assert coro is not None, "created TaskInfo from a completed Task" + super().__init__(id(task), parent_id, task.get_name(), coro) + self._task = weakref.ref(task) + + def has_pending_cancellation(self) -> bool: + if not (task := self._task()): + # If the task isn't around anymore, it won't have a pending cancellation + return False + + if task._must_cancel: # type: ignore[attr-defined] + return True + elif ( 
+ isinstance(task._fut_waiter, asyncio.Future) # type: ignore[attr-defined] + and task._fut_waiter.cancelled() # type: ignore[attr-defined] + ): + return True + + if task_state := _task_states.get(task): + if cancel_scope := task_state.cancel_scope: + return cancel_scope._effectively_cancelled + + return False + + +class TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + + def __init__( + self, + *, + debug: bool | None = None, + use_uvloop: bool = False, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + import uvloop + + loop_factory = uvloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) + self._exceptions: list[BaseException] = [] + self._runner_task: asyncio.Task | None = None + + def __enter__(self) -> TestRunner: + self._runner.__enter__() + self.get_loop().set_exception_handler(self._exception_handler) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) + + def get_loop(self) -> AbstractEventLoop: + return self._runner.get_loop() + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup( + "Multiple exceptions occurred in asynchronous callbacks", exceptions + ) + + async def _run_tests_and_fixtures( + self, + receive_stream: MemoryObjectReceiveStream[ + 
tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + from _pytest.outcomes import OutcomeException + + with receive_stream, self._send_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except CancelledError as exc: + if not future.cancelled(): + future.cancel(*exc.args) + + raise + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + if not isinstance(exc, (Exception, OutcomeException)): + raise + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + self._raise_async_exceptions() + + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) 
+ self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del _task_states[task] + + debug = options.get("debug", None) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + import uvloop + + loop_factory = uvloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + @classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with CancelScope(shield=True): + await sleep(0) + + @classmethod + 
async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + if (task := current_task()) is None: + return math.inf + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> abc.Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( # type: ignore[return] + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + 
_threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future = asyncio.Future[T_Retval]() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback(worker.stop) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + async def task_wrapper(scope: CancelScope) -> T_Retval: + __tracebackhide__ = True + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + scope._tasks.discard(task) + + loop = cast(AbstractEventLoop, token) + context 
= copy_context() + context.run(sniffio.current_async_library_cvar.set, "asyncio") + wrapper = task_wrapper(threadlocals.current_cancel_scope) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, wrapper, loop + ) + return f.result() + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + sniffio.current_async_library_cvar.set("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = cast(AbstractEventLoop, token) + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + await cls.checkpoint() + if isinstance(command, PathLike): + command = os.fspath(command) + + if isinstance(command, (str, bytes)): + process = await asyncio.create_subprocess_shell( + command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + 
_shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type] + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return 
ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + await cls.checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if not isinstance(obj, int): + obj = obj.fileno() + + if read_events.get(obj): + raise BusyResourceError("reading from") + + loop = get_running_loop() + event = asyncio.Event() + try: + loop.add_reader(obj, event.set) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_reader(obj, event.set) + remove_reader = 
selector.remove_reader + else: + remove_reader = loop.remove_reader + + read_events[obj] = event + try: + await event.wait() + finally: + remove_reader(obj) + del read_events[obj] + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + await cls.checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if not isinstance(obj, int): + obj = obj.fileno() + + if write_events.get(obj): + raise BusyResourceError("writing to") + + loop = get_running_loop() + event = asyncio.Event() + try: + loop.add_writer(obj, event.set) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_writer(obj, event.set) + remove_writer = selector.remove_writer + else: + remove_writer = loop.remove_writer + + write_events[obj] = event + try: + await event.wait() + finally: + del write_events[obj] + remove_writer(obj) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + 
@classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/lib/python3.11/site-packages/anyio/_backends/_trio.py b/lib/python3.11/site-packages/anyio/_backends/_trio.py new file mode 100644 index 00000000..32ae8ace --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,1334 @@ +from __future__ import annotations + +import array +import math +import os +import socket +import sys +import types +import weakref +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from concurrent.futures import Future +from contextlib import AbstractContextManager +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind +from types import TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Generic, + NoReturn, + TypeVar, + cast, + overload, +) + +import trio.from_thread +import trio.lowlevel +from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + wait_readable, + wait_writable, +) +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + TaskInfo, + WouldBlock, + abc, +) +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend, StrOrBytesPath +from ..streams.memory import MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import HasFileno + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + + +# +# Event loop +# + +RunVar = trio.lowlevel.RunVar + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: trio.CancelScope | None = None, **kwargs: object + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> CancelScope: + self.__original.__enter__() + return 
self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> None: + self.__original.cancel() + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +# +# Task groups +# + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> TaskGroup: + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + # trio.Nursery.__exit__ returns bool; .open_nursery has wrong type + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: ignore[return-value] + except BaseExceptionGroup as exc: + if not exc.split(trio.Cancelled)[1]: + raise trio.Cancelled._create() from exc + + raise + finally: + del exc_val, exc_tb + self._active = False + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be 
started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Threads +# + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + trio.from_thread.run_sync( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + trio_token=self._token, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: int | None = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + 
+@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: abc.ByteSendStream | None + _stdout: abc.ByteReceiveStream | None + _stderr: abc.ByteReceiveStream | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: + try: + await trio.sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if 
self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> NoReturn: + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await trio.lowlevel.checkpoint() + with 
self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await trio.lowlevel.checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def 
__init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading 
from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> None: + self.__original.set() + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self.__original = trio.Lock() + + @staticmethod + def _convert_runtime_error_msg(exc: RuntimeError) -> 
None: + if exc.args == ("attempt to re-acquire an already held Lock",): + exc.args = ("Attempted to acquire an already held Lock",) + + async def acquire(self) -> None: + if not self._fast_acquire: + try: + await self.__original.acquire() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def locked(self) -> bool: + return self.__original.locked() + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> LockStatistics: + orig_statistics = self.__original.statistics() + owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None + return LockStatistics( + orig_statistics.locked, owner, orig_statistics.tasks_waiting + ) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self.__original = trio.Semaphore(initial_value, max_value=max_value) + + async def acquire(self) -> None: + if not self._fast_acquire: + await self.__original.acquire() + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await 
self.__original._lot.park() + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + + @property + def max_value(self) -> int | None: + return self.__original.max_value + + @property + def value(self) -> int: + return self.__original.value + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> SemaphoreStatistics: + orig_statistics = self.__original.statistics() + return SemaphoreStatistics(orig_statistics.tasks_waiting) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__( + cls, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> CapacityLimiter: + return object.__new__(cls) + + def __init__( + self, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> None: + if original is not None: + self.__original = original + else: + assert total_tokens is not None + self.__original = trio.CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> None: + self.__original.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self.__original.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def 
acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=tuple(orig.borrowers), + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") + + +# +# Signal handling +# + + +class _SignalReceiver: + _iterator: AsyncIterator[int] + + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + + def __enter__(self) -> _SignalReceiver: + self._cm = trio.open_signal_receiver(*self._signals) + self._iterator = self._cm.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + signum = await self._iterator.__anext__() + return Signals(signum) + + +# +# Testing and debugging +# + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from queue import Queue + + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None + self._options = options + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, 
receive_stream = create_memory_object_stream(1) + with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._send_stream = None + + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: + trio.lowlevel.start_guest_run( + self._run_tests_and_fixtures, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._send_stream is None: + self._call_queue.get()() + + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) + while not outcome_holder: + self._call_queue.get()() + + return outcome_holder[0].unwrap() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) + + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + return self._call_in_runner_task(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + self._call_in_runner_task(test_func, **kwargs) + + +class TrioTaskInfo(TaskInfo): + def __init__(self, task: trio.lowlevel.Task): + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = 
id(task.parent_nursery.parent_task) + + super().__init__(id(task), parent_id, task.name, task.coro) + self._task = weakref.proxy(task) + + def has_pending_cancellation(self) -> bool: + try: + return self._task._cancel_status.effectively_cancelled + except ReferenceError: + # If the task is no longer around, it surely doesn't have a cancellation + # pending + return False + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> abc.CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + 
fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run(func, *args) + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run_sync(func, *args) + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + def convert_item(item: StrOrBytesPath) -> str: + str_or_bytes = os.fspath(item) + if isinstance(str_or_bytes, str): + return str_or_bytes + else: + return os.fsdecode(str_or_bytes) + + if isinstance(command, (str, bytes, PathLike)): + process = await trio.lowlevel.open_process( + convert_item(command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + 
shell=True, + **kwargs, + ) + else: + process = await trio.lowlevel.open_process( + [convert_item(item) for item in command], + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=False, + **kwargs, + ) + + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = 
trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: ... + + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: HasFileno | int) -> None: + try: + await wait_readable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None + + 
@classmethod + async def wait_writable(cls, obj: HasFileno | int) -> None: + try: + await wait_writable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = current_task() + return TrioTaskInfo(task) + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TrioTaskInfo(root_task)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append(TrioTaskInfo(task)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/lib/python3.11/site-packages/anyio/_core/__init__.py b/lib/python3.11/site-packages/anyio/_core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/python3.11/site-packages/anyio/_core/_asyncio_selector_thread.py b/lib/python3.11/site-packages/anyio/_core/_asyncio_selector_thread.py new file mode 100644 index 
00000000..f4d18cf0 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_asyncio_selector_thread.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +import asyncio +import socket +import threading +from collections.abc import Callable +from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +_selector_lock = threading.Lock() +_selector: Selector | None = None + + +class Selector: + def __init__(self) -> None: + self._thread = threading.Thread(target=self.run, name="AnyIO socket selector") + self._selector = DefaultSelector() + self._send, self._receive = socket.socketpair() + self._send.setblocking(False) + self._receive.setblocking(False) + # This somewhat reduces the amount of memory wasted queueing up data + # for wakeups. With these settings, maximum number of 1-byte sends + # before getting BlockingIOError: + # Linux 4.8: 6 + # macOS (darwin 15.5): 1 + # Windows 10: 525347 + # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send + # blocking, even on non-blocking sockets, so don't do that.) + self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1) + self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1) + # On Windows this is a TCP socket so this might matter. On other + # platforms this fails b/c AF_UNIX sockets aren't actually TCP. 
+ try: + self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + except OSError: + pass + + self._selector.register(self._receive, EVENT_READ) + self._closed = False + + def start(self) -> None: + self._thread.start() + threading._register_atexit(self._stop) # type: ignore[attr-defined] + + def _stop(self) -> None: + global _selector + self._closed = True + self._notify_self() + self._send.close() + self._thread.join() + self._selector.unregister(self._receive) + self._receive.close() + self._selector.close() + _selector = None + assert ( + not self._selector.get_map() + ), "selector still has registered file descriptors after shutdown" + + def _notify_self(self) -> None: + try: + self._send.send(b"\x00") + except BlockingIOError: + pass + + def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)}) + else: + if EVENT_READ in key.data: + raise ValueError( + "this file descriptor is already registered for reading" + ) + + key.data[EVENT_READ] = loop, callback + self._selector.modify(fd, key.events | EVENT_READ, key.data) + + self._notify_self() + + def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)}) + else: + if EVENT_WRITE in key.data: + raise ValueError( + "this file descriptor is already registered for writing" + ) + + key.data[EVENT_WRITE] = loop, callback + self._selector.modify(fd, key.events | EVENT_WRITE, key.data) + + self._notify_self() + + def remove_reader(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_READ: + del key.data[EVENT_READ] + 
self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def remove_writer(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_WRITE: + del key.data[EVENT_WRITE] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def run(self) -> None: + while not self._closed: + for key, events in self._selector.select(): + if key.fileobj is self._receive: + try: + while self._receive.recv(4096): + pass + except BlockingIOError: + pass + + continue + + if events & EVENT_READ: + loop, callback = key.data[EVENT_READ] + self.remove_reader(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + if events & EVENT_WRITE: + loop, callback = key.data[EVENT_WRITE] + self.remove_writer(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + +def get_selector() -> Selector: + global _selector + + with _selector_lock: + if _selector is None: + _selector = Selector() + _selector.start() + + return _selector diff --git a/lib/python3.11/site-packages/anyio/_core/_eventloop.py b/lib/python3.11/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 00000000..6dcb4589 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,166 @@ +from __future__ import annotations + +import math +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager +from importlib import import_module +from typing import TYPE_CHECKING, Any, TypeVar + +import sniffio + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new 
backends are introduced +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +threadlocals = threading.local() +loaded_backends: dict[str, type[AsyncBackend]] = {} + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here `) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this + thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + async_backend = get_async_backend(backend) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async + # library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return async_backend.run(func, args, {}, backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. 
+ + :param delay: the duration, in seconds + + """ + return await get_async_backend().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) + + .. versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + + """ + return get_async_backend().current_time() + + +def get_all_backends() -> tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_async_backend().cancelled_exception_class() + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread( + backend_class: type[AsyncBackend], token: object +) -> Generator[Any, None, None]: + threadlocals.current_async_backend = backend_class + threadlocals.current_token = token + try: + yield + finally: + del threadlocals.current_async_backend + del threadlocals.current_token + + +def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + # We use our own dict instead of sys.modules to get the already imported back-end + # class because the appropriate modules in sys.modules could potentially be only + # partially initialized + try: + return loaded_backends[asynclib_name] + except KeyError: + module = import_module(f"anyio._backends._{asynclib_name}") + loaded_backends[asynclib_name] = 
module.backend_class + return module.backend_class diff --git a/lib/python3.11/site-packages/anyio/_core/_exceptions.py b/lib/python3.11/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 00000000..16b94482 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,126 @@ +from __future__ import annotations + +import sys +from collections.abc import Generator +from textwrap import dedent +from typing import Any + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external + causes (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or + otherwise misbehaves. + """ + + +class BrokenWorkerIntepreter(Exception): + """ + Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is + raised in the subinterpreter. + """ + + def __init__(self, excinfo: Any): + # This was adapted from concurrent.futures.interpreter.ExecutionFailed + msg = excinfo.formatted + if not msg: + if excinfo.type and excinfo.msg: + msg = f"{excinfo.type.__name__}: {excinfo.msg}" + else: + msg = excinfo.type.__name__ or excinfo.msg + + super().__init__(msg) + self.excinfo = excinfo + + def __str__(self) -> str: + try: + formatted = self.excinfo.errdisplay + except Exception: + return super().__str__() + else: + return dedent( + f""" + {super().__str__()} + + Uncaught in the interpreter: + + {formatted} + """.strip() + ) + + +class BusyResourceError(Exception): + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. 
+ """ + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """ + Raised when trying to read from a stream that has been closed from the other end. + """ + + +class IncompleteRead(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. 
+ """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" + + +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception diff --git a/lib/python3.11/site-packages/anyio/_core/_fileio.py b/lib/python3.11/site-packages/anyio/_core/_fileio.py new file mode 100644 index 00000000..4e34f2ad --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,729 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from collections.abc import ( + AsyncIterator, + Callable, + Iterable, + Iterator, + Sequence, +) +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + Final, + Generic, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> list[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... + + @overload + async def write(self: AsyncFile[str], b: str) -> int: ... + + async def write(self, b: ReadableBuffer | str) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: ... + + @overload + async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... 
+ + async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: int | None = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[bytes]: ... + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[str]: ... + + +async def open_file( + file: str | PathLike[str] | int, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: Callable[[str, int], int] | None = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. + + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. 
+ + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator[PathLike[str]] + + async def __anext__(self) -> Path: + nextval = await to_thread.run_sync( + next, self.iterator, None, abandon_on_cancel=True + ) + if nextval is None: + raise StopAsyncIteration from None + + return Path(nextval) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or + :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` + interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for + the deprecated :meth:`~pathlib.Path.link_to` method. + + Some methods may be unavailable or have limited functionality, based on the Python + version: + + * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later) + * :meth:`~pathlib.Path.full_match` (available on Python 3.13 or later) + * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later) + * :meth:`~pathlib.Path.match` (the ``case_sensitive`` paramater is only available on + Python 3.13 or later) + * :meth:`~pathlib.Path.move` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.relative_to` (the ``walk_up`` parameter is only available on + Python 3.12 or later) + * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later) + + Any methods that do disk I/O need to be awaited on. 
These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_junction` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.is_socket` + * :meth:`~pathlib.Path.is_symlink` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.resolve` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.symlink_to` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.walk` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding + :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: str | PathLike[str]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return 
self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: str | PathLike[str]) -> Path: + return Path(self._path / other) + + def __rtruediv__(self, other: str | PathLike[str]) -> Path: + return Path(other) / self + + @property + def parts(self) -> tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence[Path]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> Path: + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> list[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> Path: + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + if sys.version_info >= (3, 13): + parser = 
pathlib.Path.parser + + @classmethod + def from_uri(cls, uri: str) -> Path: + return Path(pathlib.Path.from_uri(uri)) + + def full_match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.full_match(path_pattern, case_sensitive=case_sensitive) + + def match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.match(path_pattern, case_sensitive=case_sensitive) + else: + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + if sys.version_info >= (3, 14): + + async def copy( + self, + target: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + dirs_exist_ok: bool = False, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy, + follow_symlinks=follow_symlinks, + dirs_exist_ok=dirs_exist_ok, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, target)) + + async def copy_into( + self, + target_dir: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + dirs_exist_ok: bool = False, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy_into, + follow_symlinks=follow_symlinks, + dirs_exist_ok=dirs_exist_ok, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, target_dir)) + + async def move(self, target: str | os.PathLike[str]) -> Path: + # Upstream does not handle anyio.Path properly as a PathLike + target = pathlib.Path(target) + return Path(await to_thread.run_sync(self._path.move, target)) + + async def move_into( + self, + target_dir: str | os.PathLike[str], + ) -> Path: + return Path(await to_thread.run_sync(self._path.move_into, target_dir)) + + def is_relative_to(self, other: str | PathLike[str]) -> bool: + try: + self.relative_to(other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = 
partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> Path: + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) + + async def expanduser(self) -> Path: + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) + + def glob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) + + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> Path: + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) + + if sys.version_info >= (3, 12): + + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + + async def is_mount(self) -> bool: + return await to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) + + def 
is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) + + def iterdir(self) -> AsyncIterator[Path]: + gen = self._path.iterdir() + return _PathIterator(gen) + + def joinpath(self, *args: str | PathLike[str]) -> Path: + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[bytes]: ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[str]: ... 
+ + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: str | None = None, errors: str | None = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + if sys.version_info >= (3, 12): + + def relative_to( + self, *other: str | PathLike[str], walk_up: bool = False + ) -> Path: + return Path(self._path.relative_to(*other, walk_up=walk_up)) + + else: + + def relative_to(self, *other: str | PathLike[str]) -> Path: + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> Path: + target = await to_thread.run_sync(os.readlink, self._path) + return Path(target) + + async def rename(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> Path: + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) + + def rglob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: str | PathLike[str]) -> bool: + if 
isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, abandon_on_cancel=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) + + async def symlink_to( + self, + target: str | bytes | PathLike[str] | PathLike[bytes], + target_is_directory: bool = False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + + def with_name(self, name: str) -> Path: + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> Path: + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> Path: + return Path(self._path.with_suffix(suffix)) + + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + + async def write_bytes(self, data: bytes) -> int: + 
return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/lib/python3.11/site-packages/anyio/_core/_resources.py b/lib/python3.11/site-packages/anyio/_core/_resources.py new file mode 100644 index 00000000..b9a5344a --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_resources.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/lib/python3.11/site-packages/anyio/_core/_signals.py b/lib/python3.11/site-packages/anyio/_core/_signals.py new file mode 100644 index 00000000..f3451d30 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_signals.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from collections.abc import AsyncIterator +from contextlib import AbstractContextManager +from signal import Signals + +from ._eventloop import get_async_backend + + +def open_signal_receiver( + *signals: Signals, +) -> AbstractContextManager[AsyncIterator[Signals]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers + + .. 
warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_async_backend().open_signal_receiver(*signals) diff --git a/lib/python3.11/site-packages/anyio/_core/_sockets.py b/lib/python3.11/site-packages/anyio/_core/_sockets.py new file mode 100644 index 00000000..a822d060 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,787 @@ +from __future__ import annotations + +import errno +import os +import socket +import ssl +import stat +import sys +from collections.abc import Awaitable +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from socket import AddressFamily, SocketKind +from typing import TYPE_CHECKING, Any, Literal, cast, overload + +from .. import to_thread +from ..abc import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_async_backend +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +if sys.version_info < (3, 13): + from typing_extensions import deprecated +else: + from warnings import deprecated + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, 
AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[True], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[False], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = None, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_standard_compatible: bool = True, + tls_hostname: str | None = None, + happy_eyeballs_delay: float = 0.25, +) -> SocketStream | TLSStream: + """ + Connect to a host using the TCP protocol. 
+ + This function implements the stateless version of the Happy Eyeballs algorithm (RFC + 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses, + each one is tried until one connection attempt succeeds. If the first attempt does + not connected within 250 milliseconds, a second attempt is started using the next + address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if + available) is tried first. + + When the connection has been established, a TLS handshake will be done if either + ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. + + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before + connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is + created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake + before closing the stream and requires that the server does this as well. + Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. 
+ :param tls_hostname: host name to check the server certificate against (defaults to + the value of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection + attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises OSError: if the connection attempt fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: SocketStream | None = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_async_backend() + local_address: IPSockAddrType | None = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + addr_obj = None + + if addr_obj is not None: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + else: + # getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo( + target_host, remote_port, family=family, type=socket.SOCK_STREAM + ) + + # Organize the list so that the first address is an IPv6 address (if available) + # and the second one is an IPv4 addresses. The rest can be in whatever order. 
+ v6_found = v4_found = False + target_addrs = [] + for af, *rest, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + + oserrors: list[OSError] = [] + async with create_task_group() as tg: + for i, (af, addr) in enumerate(target_addrs): + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = ( + oserrors[0] + if len(oserrors) == 1 + else ExceptionGroup("multiple connection attempts failed", oserrors) + ) + raise OSError("All connection attempts failed") from cause + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap( + connected_stream, + server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible, + ) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + + """ + path = os.fspath(path) + return await get_async_backend().connect_unix(path) + + +async def create_tcp_listener( + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, + backlog: int = 65536, + reuse_port: bool = False, +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on + all IPv4 and IPv6 interfaces. 
To listen on all interfaces on a specific address + family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``local_host`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_async_backend() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo( + local_host, + local_port, + family=family, + type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + listeners: list[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + sockaddr: tuple[str, int] | tuple[str, int, int, int] + for fam, kind, *_, sockaddr in sorted(set(gai_res)): + # Workaround for an uvloop bug where we don't get the correct scope ID for + # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to + # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539 + if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM: + continue + + raw_socket = socket.socket(fam) + raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address + # reuse. 
+ if sys.platform == "win32": + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # If only IPv6 was requested, disable dual stack operation + if fam == socket.AF_INET6: + raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # Workaround for #554 + if "%" in sockaddr[0]: + addr, scope_id = sockaddr[0].split("%", 1) + sockaddr = (addr, sockaddr[1], 0, int(scope_id)) + + raw_socket.bind(sockaddr) + raw_socket.listen(backlog) + listener = asynclib.create_tcp_listener(raw_socket) + listeners.append(listener) + except BaseException: + for listener in listeners: + await listener.aclose() + + raise + + return MultiListener(listeners) + + +async def create_unix_listener( + path: str | bytes | PathLike[Any], + *, + mode: int | None = None, + backlog: int = 65536, +) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be + removed first. + + """ + backlog = min(backlog, 65536) + raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM) + try: + raw_socket.listen(backlog) + return get_async_backend().create_unix_listener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> UDPSocket: + """ + Create a UDP socket. 
+ + If ``port`` has been given, the socket will be bound to this port on the local + machine, making this socket suitable for providing UDP based services. + + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a UDP socket + + """ + if family is AddressFamily.AF_UNSPEC and not local_host: + raise ValueError('Either "family" or "local_host" must be given') + + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + elif family is AddressFamily.AF_INET6: + local_address = ("::", 0) + else: + local_address = ("0.0.0.0", 0) + + sock = await get_async_backend().create_udp_socket( + family, local_address, None, reuse_port + ) + return cast(UDPSocket, sock) + + +async def create_connected_udp_socket( + remote_host: IPAddressType, + remote_port: int, + *, + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> ConnectedUDPSocket: + """ + Create a connected UDP socket. + + Connected UDP sockets can only communicate with the specified remote host/port, an + any packets sent from other sources are dropped. 
+ + :param remote_host: remote host to set as the default target + :param remote_port: port on the remote host to set as the default target + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` or ``remote_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a connected UDP socket + + """ + local_address = None + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + + gai_res = await getaddrinfo( + str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + remote_address = gai_res[0][-1] + + sock = await get_async_backend().create_udp_socket( + family, local_address, remote_address, reuse_port + ) + return cast(ConnectedUDPSocket, sock) + + +async def create_unix_datagram_socket( + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> UNIXDatagramSocket: + """ + Create a UNIX datagram socket. + + Not available on Windows. + + If ``local_path`` has been given, the socket will be bound to this path, making this + socket suitable for receiving datagrams from other processes. Other processes can + send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system in the ``local_path``, it will be + removed first. 
+ + :param local_path: the path on which to bind to + :param local_mode: permissions to set on the local socket + :return: a UNIX datagram socket + + """ + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket(raw_socket, None) + + +async def create_connected_unix_datagram_socket( + remote_path: str | bytes | PathLike[Any], + *, + local_path: None | str | bytes | PathLike[Any] = None, + local_mode: int | None = None, +) -> ConnectedUNIXDatagramSocket: + """ + Create a connected UNIX datagram socket. + + Connected datagram sockets can only communicate with the specified remote path. + + If ``local_path`` has been given, the socket will be bound to this path, making + this socket suitable for receiving datagrams from other processes. Other processes + can send datagrams to this socket only if ``local_path`` is set. + + If a socket already exists on the file system in the ``local_path``, it will be + removed first. + + :param remote_path: the path to set as the default target + :param local_path: the path on which to bind to + :param local_mode: permissions to set on the local socket + :return: a connected UNIX datagram socket + + """ + remote_path = os.fspath(remote_path) + raw_socket = await setup_unix_local_socket( + local_path, local_mode, socket.SOCK_DGRAM + ) + return await get_async_backend().create_unix_datagram_socket( + raw_socket, remote_path + ) + + +async def getaddrinfo( + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]: + """ + Look up a numeric IP address given a host name. + + Internationalized domain names are translated according to the (non-transitional) + IDNA 2008 standard. + + .. 
note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of + (host, port), unlike what :func:`socket.getaddrinfo` does. + + :param host: host name + :param port: port number + :param family: socket family (`'AF_INET``, ...) + :param type: socket type (``SOCK_STREAM``, ...) + :param proto: protocol number + :param flags: flags to pass to upstream ``getaddrinfo()`` + :return: list of tuples containing (family, type, proto, canonname, sockaddr) + + .. seealso:: :func:`socket.getaddrinfo` + + """ + # Handle unicode hostnames + if isinstance(host, str): + try: + encoded_host: bytes | None = host.encode("ascii") + except UnicodeEncodeError: + import idna + + encoded_host = idna.encode(host, uts46=True) + else: + encoded_host = host + + gai_res = await get_async_backend().getaddrinfo( + encoded_host, port, family=family, type=type, proto=proto, flags=flags + ) + return [ + (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr)) + for family, type, proto, canonname, sockaddr in gai_res + ] + + +def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]: + """ + Look up the host name of an IP address. + + :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) + :param flags: flags to pass to upstream ``getnameinfo()`` + :return: a tuple of (host name, service name) + + .. seealso:: :func:`socket.getnameinfo` + + """ + return get_async_backend().getnameinfo(sockaddr, flags) + + +@deprecated("This function is deprecated; use `wait_readable` instead") +def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_readable` instead. + + Wait until the given socket has data to be read. + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! 
+ + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + + """ + return get_async_backend().wait_readable(sock.fileno()) + + +@deprecated("This function is deprecated; use `wait_writable` instead") +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_writable` instead. + + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_async_backend().wait_writable(sock.fileno()) + + +def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object has data to be read. + + On Unix systems, ``obj`` must either be an integer file descriptor, or else an + object with a ``.fileno()`` method which returns an integer file descriptor. Any + kind of file descriptor can be passed, though the exact semantics will depend on + your kernel. For example, this probably won't do anything useful for on-disk files. + + On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an + object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File + descriptors aren't supported, and neither are handles that refer to anything besides + a ``SOCKET``. 
+ + On backends where this functionality is not natively provided (asyncio + ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread + which is set to shut down when the interpreter shuts down. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become readable + + """ + return get_async_backend().wait_readable(obj) + + +def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object can be written to. + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become writable + + .. seealso:: See the documentation of :func:`wait_readable` for the definition of + ``obj`` and notes on backend compatibility. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + """ + return get_async_backend().wait_writable(obj) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: tuple[str, int, int, int] | tuple[str, int], +) -> tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. 
+ + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = sockaddr + if scope_id: + # PyPy (as of v7.3.11) leaves the interface name in the result, so + # we discard it and only get the scope ID from the end + # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) + host = host.split("%")[0] + + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | None + if path is not None: + path_str = os.fsdecode(path) + + # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call + if not path_str.startswith("\0"): + # Copied from pathlib... 
+ try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in ( + errno.ENOENT, + errno.ENOTDIR, + errno.EBADF, + errno.ELOOP, + ): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket diff --git a/lib/python3.11/site-packages/anyio/_core/_streams.py b/lib/python3.11/site-packages/anyio/_core/_streams.py new file mode 100644 index 00000000..6a9814e5 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_streams.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import math +from typing import TypeVar +from warnings import warn + +from ..streams.memory import ( + MemoryObjectReceiveStream, + MemoryObjectSendStream, + MemoryObjectStreamState, +) + +T_Item = TypeVar("T_Item") + + +class create_memory_object_stream( + tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], +): + """ + Create a memory object stream. + + The stream's item type can be annotated like + :func:`create_memory_object_stream[T_Item]`. + + :param max_buffer_size: number of items held in the buffer until ``send()`` starts + blocking + :param item_type: old way of marking the streams with the right generic type for + static typing (does nothing on AnyIO 4) + + .. deprecated:: 4.0 + Use ``create_memory_object_stream[YourItemType](...)`` instead. 
+ :return: a tuple of (send stream, receive stream) + + """ + + def __new__( # type: ignore[misc] + cls, max_buffer_size: float = 0, item_type: object = None + ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + if item_type is not None: + warn( + "The item_type argument has been deprecated in AnyIO 4.0. " + "Use create_memory_object_stream[YourItemType](...) instead.", + DeprecationWarning, + stacklevel=2, + ) + + state = MemoryObjectStreamState[T_Item](max_buffer_size) + return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/lib/python3.11/site-packages/anyio/_core/_subprocesses.py b/lib/python3.11/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 00000000..7ba41a5b --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,196 @@ +from __future__ import annotations + +import sys +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence +from io import BytesIO +from os import PathLike +from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess +from typing import IO, Any, Union, cast + +from ..abc import Process +from ._eventloop import get_async_backend +from ._tasks import create_task_group + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] + + +async def run_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + input: bytes | None = None, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + check: bool = True, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: Any 
= None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> CompletedProcess[bytes]: + """ + Run an external command in a subprocess and wait until it completes. + + .. seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or `None` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the + process terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the + command + :param env: if not ``None``, this mapping replaces the inherited environment + variables from the parent process + :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used + to specify process startup parameters (Windows only) + :param creationflags: flags that can be used to control the creation of the + subprocess (see :class:`subprocess.Popen` for the specifics) + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. (POSIX only) + :param pass_fds: sequence of file descriptors to keep open between the parent and + child processes. 
(POSIX only) + :param user: effective user to run the process as (Python >= 3.9, POSIX only) + :param group: effective group to run the process as (Python >= 3.9, POSIX only) + :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9, + POSIX only) + :param umask: if not negative, this umask is applied in the child process before + running the given command (Python >= 3.9, POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process + exits with a nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + async with await open_process( + command, + stdin=PIPE if input else DEVNULL, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + startupinfo=startupinfo, + creationflags=creationflags, + start_new_session=start_new_session, + pass_fds=pass_fds, + user=user, + group=group, + extra_groups=extra_groups, + umask=umask, + ) as process: + stream_contents: list[bytes | None] = [None, None] + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None = PIPE, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, 
str] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> Process: + """ + Start an external command in a subprocess. + + .. seealso:: :class:`subprocess.Popen` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a + file-like object, or ``None`` + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or ``None`` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or ``None`` + :param cwd: If not ``None``, the working directory is changed before executing + :param env: If env is not ``None``, it must be a mapping that defines the + environment variables for the new process + :param creationflags: flags that can be used to control the creation of the + subprocess (see :class:`subprocess.Popen` for the specifics) + :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used + to specify process startup parameters (Windows only) + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. (POSIX only) + :param pass_fds: sequence of file descriptors to keep open between the parent and + child processes. 
(POSIX only)
+    :param user: effective user to run the process as (POSIX only)
+    :param group: effective group to run the process as (POSIX only)
+    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
+    :param umask: if not negative, this umask is applied in the child process before
+        running the given command (POSIX only)
+    :return: an asynchronous process object
+
+    """
+    kwargs: dict[str, Any] = {}
+    if user is not None:
+        kwargs["user"] = user
+
+    if group is not None:
+        kwargs["group"] = group
+
+    if extra_groups is not None:
+        kwargs["extra_groups"] = extra_groups
+
+    if umask >= 0:
+        kwargs["umask"] = umask
+
+    return await get_async_backend().open_process(
+        command,
+        stdin=stdin,
+        stdout=stdout,
+        stderr=stderr,
+        cwd=cwd,
+        env=env,
+        startupinfo=startupinfo,
+        creationflags=creationflags,
+        start_new_session=start_new_session,
+        pass_fds=pass_fds,
+        **kwargs,
+    )
diff --git a/lib/python3.11/site-packages/anyio/_core/_synchronization.py b/lib/python3.11/site-packages/anyio/_core/_synchronization.py
new file mode 100644
index 00000000..a6331328
--- /dev/null
+++ b/lib/python3.11/site-packages/anyio/_core/_synchronization.py
@@ -0,0 +1,732 @@
+from __future__ import annotations
+
+import math
+from collections import deque
+from dataclasses import dataclass
+from types import TracebackType
+
+from sniffio import AsyncLibraryNotFoundError
+
+from ..lowlevel import checkpoint
+from ._eventloop import get_async_backend
+from ._exceptions import BusyResourceError
+from ._tasks import CancelScope
+from ._testing import TaskInfo, get_current_task
+
+
+@dataclass(frozen=True)
+class EventStatistics:
+    """
+    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
+    """
+
+    tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class CapacityLimiterStatistics:
+    """
+    :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
+    :ivar float total_tokens: total number of available tokens
+    :ivar tuple borrowers: tasks or
other objects currently holding tokens borrowed from + this limiter + :ivar int tasks_waiting: number of tasks waiting on + :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: TaskInfo | None + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> Event: + try: + return get_async_backend().create_event() + except AsyncLibraryNotFoundError: + return EventAdapter() + + def set(self) -> None: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns + immediately. 
+ + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class EventAdapter(Event): + _internal_event: Event | None = None + _is_set: bool = False + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + if self._is_set: + self._internal_event.set() + + return self._internal_event + + def set(self) -> None: + if self._internal_event is None: + self._is_set = True + else: + self._event.set() + + def is_set(self) -> bool: + if self._internal_event is None: + return self._is_set + + return self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + +class Lock: + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + try: + return get_async_backend().create_lock(fast_acquire=fast_acquire) + except AsyncLibraryNotFoundError: + return LockAdapter(fast_acquire=fast_acquire) + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. 
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Release the lock.""" + raise NotImplementedError + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + raise NotImplementedError + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class LockAdapter(Lock): + _internal_lock: Lock | None = None + + def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False): + self._fast_acquire = fast_acquire + + @property + def _lock(self) -> Lock: + if self._internal_lock is None: + self._internal_lock = get_async_backend().create_lock( + fast_acquire=self._fast_acquire + ) + + return self._internal_lock + + async def __aenter__(self) -> None: + await self._lock.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self._internal_lock is not None: + self._internal_lock.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await self._lock.acquire() + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + + def release(self) -> None: + """Release the lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._lock.locked() + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. 
versionadded:: 3.0 + + """ + if self._internal_lock is None: + return LockStatistics(False, None, 0) + + return self._internal_lock.statistics() + + +class Condition: + _owner_task: TaskInfo | None = None + + def __init__(self, lock: Lock | None = None): + self._lock = lock or Lock() + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + self._owner_task = get_current_task() + + def release(self) -> None: + """Release the underlying lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is set.""" + return self._lock.locked() + + def notify(self, n: int = 1) -> None: + """Notify exactly n listeners.""" + self._check_acquired() + for _ in range(n): + try: + event = self._waiters.popleft() + except IndexError: + break + + event.set() + + def notify_all(self) -> None: + """Notify all the listeners.""" + self._check_acquired() + for event in self._waiters: + event.set() + + self._waiters.clear() + + async def wait(self) -> None: + """Wait for a notification.""" + await checkpoint() + event = Event() + self._waiters.append(event) + self.release() + try: + await event.wait() + except BaseException: + if not event.is_set(): + self._waiters.remove(event) + + raise + finally: + with CancelScope(shield=True): + await self.acquire() + + def 
statistics(self) -> ConditionStatistics: + """ + Return statistics about the current state of this condition. + + .. versionadded:: 3.0 + """ + return ConditionStatistics(len(self._waiters), self._lock.statistics()) + + +class Semaphore: + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + try: + return get_async_backend().create_semaphore( + initial_value, max_value=max_value, fast_acquire=fast_acquire + ) + except AsyncLibraryNotFoundError: + return SemaphoreAdapter(initial_value, max_value=max_value) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ): + if not isinstance(initial_value, int): + raise TypeError("initial_value must be an integer") + if initial_value < 0: + raise ValueError("initial_value must be >= 0") + if max_value is not None: + if not isinstance(max_value, int): + raise TypeError("max_value must be an integer or None") + if max_value < initial_value: + raise ValueError( + "max_value must be equal to or higher than initial_value" + ) + + self._fast_acquire = fast_acquire + + async def __aenter__(self) -> Semaphore: + await self.acquire() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Decrement the semaphore value, blocking if necessary.""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Increment the semaphore value.""" + raise NotImplementedError + + @property + def value(self) -> int: + """The current value of the semaphore.""" + raise NotImplementedError + + @property + def max_value(self) -> int | None: + """The maximum value of the semaphore.""" + raise NotImplementedError + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class SemaphoreAdapter(Semaphore): + _internal_semaphore: Semaphore | None = None + + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> SemaphoreAdapter: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self._initial_value = initial_value + self._max_value = max_value + + @property + def _semaphore(self) -> Semaphore: + if self._internal_semaphore is None: + self._internal_semaphore = get_async_backend().create_semaphore( + self._initial_value, max_value=self._max_value + ) + + return self._internal_semaphore + + async def acquire(self) -> None: + await self._semaphore.acquire() + + def acquire_nowait(self) -> None: + self._semaphore.acquire_nowait() + + def release(self) -> None: + self._semaphore.release() + + @property + def value(self) -> int: + if self._internal_semaphore is None: + return self._initial_value + + return self._semaphore.value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + if self._internal_semaphore is None: + return SemaphoreStatistics(tasks_waiting=0) + + return self._semaphore.statistics() + + +class 
CapacityLimiter: + def __new__(cls, total_tokens: float) -> CapacityLimiter: + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except AsyncLibraryNotFoundError: + return CapacityLimiterAdapter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their + tokens. + + .. versionchanged:: 3.0 + The property is now writable. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire a token for the current task without waiting for one to become + available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become + available. 
+ + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None + + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) + + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens + + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + + return self._internal_limiter + + async def __aenter__(self) -> None: + await self._limiter.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not 
math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") + + if self._internal_limiter is None: + self._total_tokens = value + return + + self._limiter.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 + + return self._internal_limiter.borrowed_tokens + + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.available_tokens + + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self._limiter.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) + + def release(self) -> None: + self._limiter.release() + + def release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) + + return self._internal_limiter.statistics() + + +class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while the previous has not exited it yet will trigger + :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g. "Another task is already {action} this resource") + + .. 
versionadded:: 4.1 + """ + + __slots__ = "action", "_guarded" + + def __init__(self, action: str = "using"): + self.action: str = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._guarded = False diff --git a/lib/python3.11/site-packages/anyio/_core/_tasks.py b/lib/python3.11/site-packages/anyio/_core/_tasks.py new file mode 100644 index 00000000..fe490151 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,158 @@ +from __future__ import annotations + +import math +from collections.abc import Generator +from contextlib import contextmanager +from types import TracebackType + +from ..abc._tasks import TaskGroup, TaskStatus +from ._eventloop import get_async_backend + + +class _IgnoredTaskStatus(TaskStatus[object]): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope: + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) + + def cancel(self) -> None: + """Cancel this scope immediately.""" + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. 
+ + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def cancelled_caught(self) -> bool: + """ + ``True`` if this scope suppressed a cancellation exception it itself raised. + + This is typically used to check if any work was interrupted, or to see if the + scope was cancelled due to its deadline being reached. The value will, however, + only be ``True`` if the cancellation was triggered by the scope itself (and not + an outer scope). + + """ + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> CancelScope: + raise NotImplementedError + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + raise NotImplementedError + + +@contextmanager +def fail_after( + delay: float | None, shield: bool = False +) -> Generator[CancelScope, None, None]: + """ + Create a context manager which raises a :class:`TimeoutError` if does not finish in + time. 
+ + :param delay: maximum allowed time (in seconds) before raising the exception, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] + + """ + current_time = get_async_backend().current_time + deadline = (current_time() + delay) if delay is not None else math.inf + with get_async_backend().create_cancel_scope( + deadline=deadline, shield=shield + ) as cancel_scope: + yield cancel_scope + + if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: + raise TimeoutError + + +def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. + + :param delay: maximum allowed time (in seconds) before exiting the context block, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + """ + deadline = ( + (get_async_backend().current_time() + delay) if delay is not None else math.inf + ) + return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the current + task. + + :return: a clock value from the event loop's internal clock (or ``float('inf')`` if + there is no deadline in effect, or ``float('-inf')`` if the current scope has + been cancelled) + :rtype: float + + """ + return get_async_backend().current_effective_deadline() + + +def create_task_group() -> TaskGroup: + """ + Create a task group. 
+ + :return: a task group + + """ + return get_async_backend().create_task_group() diff --git a/lib/python3.11/site-packages/anyio/_core/_testing.py b/lib/python3.11/site-packages/anyio/_core/_testing.py new file mode 100644 index 00000000..9e28b227 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_testing.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Generator +from typing import Any, cast + +from ._eventloop import get_async_backend + + +class TaskInfo: + """ + Represents an asynchronous task. + + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: int | None, + name: str | None, + coro: Generator[Any, Any, Any] | Awaitable[Any], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: int | None = parent_id + self.name: str | None = name + self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def has_pending_cancellation(self) -> bool: + """ + Return ``True`` if the task has a cancellation pending, ``False`` otherwise. + + """ + return False + + +def get_current_task() -> TaskInfo: + """ + Return the current task. 
+ + :return: a representation of the current task + + """ + return get_async_backend().get_current_task() + + +def get_running_tasks() -> list[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_async_backend().wait_all_tasks_blocked() diff --git a/lib/python3.11/site-packages/anyio/_core/_typedattr.py b/lib/python3.11/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 00000000..f358a448 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload + +from ._exceptions import TypedAttributeLookupError + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding + values. 
+ + If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given + + """ + try: + getter = self.extra_attributes[attribute] + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default + + return getter() diff --git a/lib/python3.11/site-packages/anyio/abc/__init__.py b/lib/python3.11/site-packages/anyio/abc/__init__.py new file mode 100644 index 00000000..3d3b61cc --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/__init__.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from 
._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as AnyByteReceiveStream +from ._streams import AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from ._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio.abc."): + __value.__module__ = __name__ + +del 
__value diff --git a/lib/python3.11/site-packages/anyio/abc/_eventloop.py b/lib/python3.11/site-packages/anyio/abc/_eventloop.py new file mode 100644 index 00000000..2bfdf286 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/_eventloop.py @@ -0,0 +1,376 @@ +from __future__ import annotations + +import math +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncIterator, Awaitable, Callable, Sequence +from contextlib import AbstractContextManager +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind, socket +from typing import ( + IO, + TYPE_CHECKING, + Any, + TypeVar, + Union, + overload, +) + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +if TYPE_CHECKING: + from _typeshed import HasFileno + + from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore + from .._core._tasks import CancelScope + from .._core._testing import TaskInfo + from ..from_thread import BlockingPortal + from ._sockets import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, + ) + from ._subprocesses import Process + from ._tasks import TaskGroup + from ._testing import TestRunner + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] + + +class AsyncBackend(metaclass=ABCMeta): + @classmethod + @abstractmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. 
+ + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param kwargs: positional arguments to ``func`` + :param options: keyword arguments to call the backend ``run()`` implementation + with + :return: the return value of the coroutine function + """ + + @classmethod + @abstractmethod + def current_token(cls) -> object: + """ + + :return: + """ + + @classmethod + @abstractmethod + def current_time(cls) -> float: + """ + Return the current value of the event loop's internal clock. + + :return: the clock value (seconds) + """ + + @classmethod + @abstractmethod + def cancelled_exception_class(cls) -> type[BaseException]: + """Return the exception class that is raised in a task if it's cancelled.""" + + @classmethod + @abstractmethod + async def checkpoint(cls) -> None: + """ + Check if the task has been cancelled, and allow rescheduling of other tasks. + + This is effectively the same as running :meth:`checkpoint_if_cancelled` and then + :meth:`cancel_shielded_checkpoint`. + """ + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + """ + Check if the current task group has been cancelled. + + This will check if the task has been cancelled, but will not allow other tasks + to be scheduled if not. + + """ + if cls.current_effective_deadline() == -math.inf: + await cls.checkpoint() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + """ + Allow the rescheduling of other tasks. + + This will give other tasks the opportunity to run, but without checking if the + current task group has been cancelled, unlike with :meth:`checkpoint`. + + """ + with cls.create_cancel_scope(shield=True): + await cls.sleep(0) + + @classmethod + @abstractmethod + async def sleep(cls, delay: float) -> None: + """ + Pause the current task for the specified duration. 
+ + :param delay: the duration, in seconds + """ + + @classmethod + @abstractmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + pass + + @classmethod + @abstractmethod + def current_effective_deadline(cls) -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the + current task. + + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + pass + + @classmethod + @abstractmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def create_blocking_portal(cls) -> BlockingPortal: + pass + + @classmethod + @abstractmethod + async 
def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + pass + + @classmethod + @abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: ... 
+ + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_readable(cls, obj: HasFileno | int) -> None: + pass + + @classmethod + @abstractmethod + async def wait_writable(cls, obj: HasFileno | int) -> None: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + @abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/lib/python3.11/site-packages/anyio/abc/_resources.py b/lib/python3.11/site-packages/anyio/abc/_resources.py new file mode 100644 index 00000000..10df115a --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/_resources.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): 
+ """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. + """ + + __slots__ = () + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/lib/python3.11/site-packages/anyio/abc/_sockets.py b/lib/python3.11/site-packages/anyio/abc/_sockets.py new file mode 100644 index 00000000..1c6a450c --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,194 @@ +from __future__ import annotations + +import socket +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from contextlib import AsyncExitStack +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import Any, TypeVar, Union + +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType = tuple[bytes, str] +T_Retval = TypeVar("T_Retval") + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: 
the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = ( + lambda: self._raw_socket.getsockname()[1] + ) + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. 
+ """ + + +class UNIXSocketStream(SocketStream): + @abstractmethod + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + """ + Send file descriptors along with a message to the peer. + + :param message: a non-empty bytestring + :param fds: a collection of files (either numeric file descriptors or open file + or socket objects) + """ + + @abstractmethod + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + """ + Receive file descriptors along with a message from the peer. + + :param msglen: length of the message to expect from the peer + :param maxfds: maximum number of file descriptors to expect from the peer + :return: a tuple of (message, file descriptors) + """ + + +class SocketListener(Listener[SocketStream], _SocketProvider): + """ + Listens to incoming socket connections. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @abstractmethod + async def accept(self) -> SocketStream: + """Accept an incoming connection.""" + + async def serve( + self, + handler: Callable[[SocketStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + from .. import create_task_group + + async with AsyncExitStack() as stack: + if task_group is None: + task_group = await stack.enter_async_context(create_task_group()) + + while True: + stream = await self.accept() + task_group.start_soon(handler, stream) + + +class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): + """ + Represents an unconnected UDP socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, host: str, port: int) -> None: + """ + Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))). + + """ + return await self.send((data, (host, port))) + + +class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents an connected UDP socket. 
+ + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + +class UNIXDatagramSocket( + UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider +): + """ + Represents an unconnected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + async def sendto(self, data: bytes, path: str) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path)).""" + return await self.send((data, path)) + + +class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents a connected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ diff --git a/lib/python3.11/site-packages/anyio/abc/_streams.py b/lib/python3.11/site-packages/anyio/abc/_streams.py new file mode 100644 index 00000000..8c638683 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/_streams.py @@ -0,0 +1,203 @@ +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Callable +from typing import Any, Generic, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class UnreliableObjectReceiveStream( + Generic[T_co], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. 
+ """ + + def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: + return self + + async def __anext__(self) -> T_co: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_co: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_contra], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_contra) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): + """ + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_contra]): + """ + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. 
+ """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message + delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. + """ + + def __aiter__(self) -> ByteReceiveStream: + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementors of this interface should not return an empty + :class:`bytes` object, and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. 
This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. 
+ + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/lib/python3.11/site-packages/anyio/abc/_subprocesses.py b/lib/python3.11/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 00000000..ce0564ce --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from abc import abstractmethod +from signal import Signals + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> int | None: + """ + The return code of the process. If the process has not yet terminated, this will + be ``None``. 
+ """ + + @property + @abstractmethod + def stdin(self) -> ByteSendStream | None: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> ByteReceiveStream | None: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> ByteReceiveStream | None: + """The stream for the standard error output of the process.""" diff --git a/lib/python3.11/site-packages/anyio/abc/_tasks.py b/lib/python3.11/site-packages/anyio/abc/_tasks.py new file mode 100644 index 00000000..f6e5c40c --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable +from types import TracebackType +from typing import TYPE_CHECKING, Any, Protocol, TypeVar, overload + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from .._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +class TaskStatus(Protocol[T_contra]): + @overload + def started(self: TaskStatus[None]) -> None: ... + + @overload + def started(self, value: T_contra) -> None: ... + + def started(self, value: T_contra | None = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + + .. note:: On asyncio, support for eager task factories is considered to be + **experimental**. 
In particular, they don't follow the usual semantics of new + tasks being scheduled on the next iteration of the event loop, and may thus + cause unexpected behavior in code that wasn't written with such semantics in + mind. + """ + + cancel_scope: CancelScope + + @abstractmethod + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Awaitable[Any]], + *args: object, + name: object = None, + ) -> Any: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` + + .. 
versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> TaskGroup: + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/lib/python3.11/site-packages/anyio/abc/_testing.py b/lib/python3.11/site-packages/anyio/abc/_testing.py new file mode 100644 index 00000000..7c50ed76 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/abc/_testing.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the + same event loop. + """ + + def __enter__(self) -> TestRunner: + return self + + @abstractmethod + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool | None: ... + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[_T, Any]], + kwargs: dict[str, Any], + ) -> Iterable[_T]: + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: dict[str, Any], + ) -> _T: + """ + Run an async fixture. 
+ + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + """ + Run an async test function. + + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/lib/python3.11/site-packages/anyio/from_thread.py b/lib/python3.11/site-packages/anyio/from_thread.py new file mode 100644 index 00000000..93a4cfe8 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/from_thread.py @@ -0,0 +1,527 @@ +from __future__ import annotations + +import sys +from collections.abc import Awaitable, Callable, Generator +from concurrent.futures import Future +from contextlib import ( + AbstractAsyncContextManager, + AbstractContextManager, + contextmanager, +) +from dataclasses import dataclass, field +from inspect import isawaitable +from threading import Lock, Thread, get_ident +from types import TracebackType +from typing import ( + Any, + Generic, + TypeVar, + cast, + overload, +) + +from ._core import _eventloop +from ._core._eventloop import get_async_backend, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc import AsyncBackend +from .abc._tasks import TaskStatus + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co", covariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a coroutine function from a worker thread. 
+ + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_async_from_thread(func, args, token=token) + + +def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. + + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_sync_from_thread(func, args, token=token) + + +class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): + _enter_future: Future[T_co] + _exit_future: Future[bool | None] + _exit_event: Event + _exit_exc_info: tuple[ + type[BaseException] | None, BaseException | None, TracebackType | None + ] = (None, None, None) + + def __init__( + self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal + ): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> bool | None: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. 
+ await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. + result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + return self._enter_future.result() + + def __exit__( + self, + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> BlockingPortal: + return get_async_backend().create_blocking_portal() + + def __init__(self) -> None: + self._event_loop_thread_id: int | None = get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> BlockingPortal: + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop thread" + ) + + 
async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. + + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` + to let them finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + future: Future[T_Retval], + ) -> None: + def callback(f: Future[T_Retval]) -> None: + if f.cancelled() and self._event_loop_thread_id not in ( + None, + get_ident(), + ): + self.call(scope.cancel) + + try: + retval_or_awaitable = func(*args, **kwargs) + if isawaitable(retval_or_awaitable): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval_or_awaitable + else: + retval = retval_or_awaitable + except self._cancelled_exc_class: + future.cancel() + future.set_running_or_notify_cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + """ + Spawn a new task using the given callable. + + Implementors must ensure that the future is resolved when the task finishes. 
+ + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, + or the exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + ) -> T_Retval: ... + + @overload + def call( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: ... + + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + """ + Start a task in the portal's task group. + + The task will be run inside a cancel scope which can be cancelled by cancelling + the returned future. 
+ + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the + task completes successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + :rtype: concurrent.futures.Future[T_Retval] + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future[T_Retval] = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Awaitable[T_Retval]], + *args: object, + name: object = None, + ) -> tuple[Future[T_Retval], Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`.abc.TaskGroup.start`. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` + is the value passed to ``task_status.started()`` from within the target + function + :rtype: tuple[concurrent.futures.Future[T_Retval], Any] + + .. 
versionadded:: 3.0 + + """ + + def task_done(future: Future[T_Retval]) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AbstractAsyncContextManager[T_co] + ) -> AbstractContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping + in the middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +@dataclass +class BlockingPortalProvider: + """ + A manager for a blocking portal. Used as a context manager. The first thread to + enter this context manager causes a blocking portal to be started with the specific + parameters, and the last thread to exit causes the portal to be shut down. Thus, + there will be exactly one blocking portal running in this context as long as at + least one thread has entered this context manager. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + + .. 
versionadded:: 4.4 + """ + + backend: str = "asyncio" + backend_options: dict[str, Any] | None = None + _lock: Lock = field(init=False, default_factory=Lock) + _leases: int = field(init=False, default=0) + _portal: BlockingPortal = field(init=False) + _portal_cm: AbstractContextManager[BlockingPortal] | None = field( + init=False, default=None + ) + + def __enter__(self) -> BlockingPortal: + with self._lock: + if self._portal_cm is None: + self._portal_cm = start_blocking_portal( + self.backend, self.backend_options + ) + self._portal = self._portal_cm.__enter__() + + self._leases += 1 + return self._portal + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + portal_cm: AbstractContextManager[BlockingPortal] | None = None + with self._lock: + assert self._portal_cm + assert self._leases > 0 + self._leases -= 1 + if not self._leases: + portal_cm = self._portal_cm + self._portal_cm = None + del self._portal + + if portal_cm: + portal_cm.__exit__(None, None, None) + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", backend_options: dict[str, Any] | None = None +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. 
+ + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + future.set_result(portal_) + await portal_.sleep_until_stopped() + + def run_blocking_portal() -> None: + if future.set_running_or_notify_cancel(): + try: + _eventloop.run( + run_portal, backend=backend, backend_options=backend_options + ) + except BaseException as exc: + if not future.done(): + future.set_exception(exc) + + future: Future[BlockingPortal] = Future() + thread = Thread(target=run_blocking_portal, daemon=True) + thread.start() + try: + cancel_remaining_tasks = False + portal = future.result() + try: + yield portal + except BaseException: + cancel_remaining_tasks = True + raise + finally: + try: + portal.call(portal.stop, cancel_remaining_tasks) + except RuntimeError: + pass + finally: + thread.join() + + +def check_cancelled() -> None: + """ + Check if the cancel scope of the host task's running the current worker thread has + been cancelled. + + If the host task's current cancel scope has indeed been cancelled, the + backend-specific cancellation exception will be raised. 
+ + :raises RuntimeError: if the current thread was not spawned by + :func:`.to_thread.run_sync` + + """ + try: + async_backend: AsyncBackend = threadlocals.current_async_backend + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + async_backend.check_cancelled() diff --git a/lib/python3.11/site-packages/anyio/lowlevel.py b/lib/python3.11/site-packages/anyio/lowlevel.py new file mode 100644 index 00000000..14c7668c --- /dev/null +++ b/lib/python3.11/site-packages/anyio/lowlevel.py @@ -0,0 +1,161 @@ +from __future__ import annotations + +import enum +from dataclasses import dataclass +from typing import Any, Generic, Literal, TypeVar, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_async_backend + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().cancel_shielded_checkpoint() + + +def current_token() -> object: + """ + Return a backend specific token object that can be used to get back to the event + loop. 
+ + """ + return get_async_backend().current_token() + + +_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() +_token_wrappers: dict[Any, _TokenWrapper] = {} + + +@dataclass(frozen=True) +class _TokenWrapper: + __slots__ = "_token", "__weakref__" + _token: object + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = "_var", "_value", "_redeemed" + + def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): + self._var = var + self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value + self._redeemed = False + + +class RunVar(Generic[T]): + """ + Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. + """ + + __slots__ = "_name", "_default" + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + _token_wrappers: set[_TokenWrapper] = set() + + def __init__( + self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ): + self._name = name + self._default = default + + @property + def _current_vars(self) -> dict[str, T]: + token = current_token() + try: + return _run_vars[token] + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars + + @overload + def get(self, default: D) -> T | D: ... + + @overload + def get(self) -> T: ... 
+ + def get( + self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ) -> T | D: + try: + return self._current_vars[self._name] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError( + f'Run variable "{self._name}" has no value and no default set' + ) + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) + current_vars[self._name] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError("This token does not belong to this RunVar") + + if token._redeemed: + raise ValueError("This token has already been used") + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self._name] + except KeyError: + pass + else: + self._current_vars[self._name] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f"" diff --git a/lib/python3.11/site-packages/anyio/py.typed b/lib/python3.11/site-packages/anyio/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/lib/python3.11/site-packages/anyio/pytest_plugin.py b/lib/python3.11/site-packages/anyio/pytest_plugin.py new file mode 100644 index 00000000..4a0d59dd --- /dev/null +++ b/lib/python3.11/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,191 @@ +from __future__ import annotations + +import sys +from collections.abc import Generator, Iterator +from contextlib import ExitStack, contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction, ismethod +from typing import Any, cast + +import pytest +import sniffio +from _pytest.fixtures import SubRequest +from _pytest.outcomes import Exit + +from ._core._eventloop import get_all_backends, get_async_backend +from ._core._exceptions import iterate_exceptions +from .abc import TestRunner + +if 
sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +_current_runner: TestRunner | None = None +_runner_stack: ExitStack | None = None +_runner_leases = 0 + + +def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(tuple[str, dict[str, Any]], backend) + + raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") + + +@contextmanager +def get_runner( + backend_name: str, backend_options: dict[str, Any] +) -> Iterator[TestRunner]: + global _current_runner, _runner_leases, _runner_stack + if _current_runner is None: + asynclib = get_async_backend(backend_name) + _runner_stack = ExitStack() + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the + # async library + token = sniffio.current_async_library_cvar.set(backend_name) + _runner_stack.callback(sniffio.current_async_library_cvar.reset, token) + + backend_options = backend_options or {} + _current_runner = _runner_stack.enter_context( + asynclib.create_test_runner(backend_options) + ) + + _runner_leases += 1 + try: + yield _current_runner + finally: + _runner_leases -= 1 + if not _runner_leases: + assert _runner_stack is not None + _runner_stack.close() + _runner_stack = _current_runner = None + + +def pytest_configure(config: Any) -> None: + config.addinivalue_line( + "markers", + "anyio: mark the (coroutine function) test to be run " + "asynchronously via anyio.", + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup(fixturedef: Any, request: Any) -> Generator[Any]: + def wrapper( + *args: Any, anyio_backend: Any, request: SubRequest, **kwargs: Any + ) -> Any: + # Rebind any fixture methods to the request instance + if ( + request.instance + and ismethod(func) + 
and type(func.__self__) is type(request.instance) + ): + local_func = func.__func__.__get__(request.instance) + else: + local_func = func + + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + if has_request_arg: + kwargs["request"] = request + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(local_func): + yield from runner.run_asyncgen_fixture(local_func, kwargs) + else: + yield runner.run_fixture(local_func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests + # that involve the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + fixturedef.func = wrapper + original_argname = fixturedef.argnames + + if not (has_backend_arg := "anyio_backend" in fixturedef.argnames): + fixturedef.argnames += ("anyio_backend",) + + if not (has_request_arg := "request" in fixturedef.argnames): + fixturedef.argnames += ("request",) + + try: + return (yield) + finally: + fixturedef.func = func + fixturedef.argnames = original_argname + + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if marker or any(marker.name == "anyio" for marker in own_markers): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = 
pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + try: + runner.run_test(pyfuncitem.obj, testargs) + except ExceptionGroup as excgrp: + for exc in iterate_exceptions(excgrp): + if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)): + raise exc from excgrp + + raise + + return True + + return None + + +@pytest.fixture(scope="module", params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] diff --git a/lib/python3.11/site-packages/anyio/streams/__init__.py b/lib/python3.11/site-packages/anyio/streams/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/python3.11/site-packages/anyio/streams/buffered.py b/lib/python3.11/site-packages/anyio/streams/buffered.py new file mode 100644 index 00000000..f5d5e836 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/streams/buffered.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from dataclasses import dataclass, field +from typing import Any + 
+from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. + """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. 
+ + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. + + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git 
a/lib/python3.11/site-packages/anyio/streams/file.py b/lib/python3.11/site-packages/anyio/streams/file.py new file mode 100644 index 00000000..f4924642 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/streams/file.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, cast + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. 
versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: + """ + Create a file read stream by opening the given file. + + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: str | PathLike[str], append: bool = False + ) -> FileWriteStream: + """ + Create a file write stream by opening the given file for writing. 
+ + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/lib/python3.11/site-packages/anyio/streams/memory.py b/lib/python3.11/site-packages/anyio/streams/memory.py new file mode 100644 index 00000000..b547aa6a --- /dev/null +++ b/lib/python3.11/site-packages/anyio/streams/memory.py @@ -0,0 +1,317 @@ +from __future__ import annotations + +import warnings +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Generic, NamedTuple, TypeVar + +from .. 
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._testing import TaskInfo, get_current_task +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectItemReceiver(Generic[T_Item]): + task_info: TaskInfo = field(init=False, default_factory=get_current_task) + item: T_Item = field(init=False) + + def __repr__(self) -> str: + # When item is not defined, we get following error with default __repr__: + # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item' + item = getattr(self, "item", None) + return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})" + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: OrderedDict[Event, MemoryObjectItemReceiver[T_Item]] = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: OrderedDict[Event, T_Item] = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> 
MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): + _state: MemoryObjectStreamState[T_co] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_co: + """ + Receive the next item if it can be done without waiting. + + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_co: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + receiver = MemoryObjectItemReceiver[T_co]() + self._state.waiting_receivers[receive_event] = receiver + + try: + await receive_event.wait() + finally: + self._state.waiting_receivers.pop(receive_event, None) + + try: + return receiver.item + except AttributeError: + raise EndOfStream + + def clone(self) -> MemoryObjectReceiveStream[T_co]: + """ + Create a clone of this receive stream. + + Each clone can be closed separately. 
Only when all clones have been closed will + the receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectReceiveStream[T_co]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + source=self, + ) + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): + _state: MemoryObjectStreamState[T_contra] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_contra) -> None: + """ + Send an item immediately if it can be done without waiting. 
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + while self._state.waiting_receivers: + receive_event, receiver = self._state.waiting_receivers.popitem(last=False) + if not receiver.task_info.has_pending_cancellation(): + receiver.item = item + receive_event.set() + return + + if len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. + + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) + raise + + if send_event in self._state.waiting_senders: + del self._state.waiting_senders[send_event] + raise BrokenResourceError from None + + def clone(self) -> MemoryObjectSendStream[T_contra]: + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. 
+ + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectSendStream[T_contra]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + source=self, + ) diff --git a/lib/python3.11/site-packages/anyio/streams/stapled.py b/lib/python3.11/site-packages/anyio/streams/stapled.py new file mode 100644 index 00000000..80f64a2e --- /dev/null +++ b/lib/python3.11/site-packages/anyio/streams/stapled.py @@ -0,0 +1,141 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte 
streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. 
+ + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. + + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: list[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None + ) -> None: + from .. 
import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/lib/python3.11/site-packages/anyio/streams/text.py b/lib/python3.11/site-packages/anyio/streams/text.py new file mode 100644 index 00000000..f1a11278 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/streams/text.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +import codecs +from collections.abc import Callable, Mapping +from dataclasses import InitVar, dataclass, field +from typing import Any + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, +) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } diff --git a/lib/python3.11/site-packages/anyio/streams/tls.py b/lib/python3.11/site-packages/anyio/streams/tls.py new file mode 100644 index 00000000..b6961bee --- /dev/null +++ b/lib/python3.11/site-packages/anyio/streams/tls.py @@ -0,0 +1,337 @@ +from __future__ import annotations + +import logging +import re +import ssl +import sys +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from functools import wraps +from typing import Any, TypeVar + +from .. 
import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +_PCTRTT = tuple[tuple[str, str], ...] +_PCTRTTT = tuple[_PCTRTT, ...] + + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + + #: the selected ALPN protocol + alpn_protocol: str | None = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` + # for more information) + peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: bytes | None = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared by the client during the TLS handshake (``None`` if this is the + #: client side) + shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the + #: stream is being closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. 
+ All extra attributes from :class:`~TLSAttribute` are supported. + + :var AnyByteStream transport_stream: the wrapped stream + + """ + + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap( + cls, + transport_stream: AnyByteStream, + *, + server_side: bool | None = None, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> TLSStream: + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, + ``False`` if this is the client side (if omitted, will be set to ``False`` + if ``hostname`` has been provided, ``False`` otherwise). Used only to create + a default context when an explicit context has not been provided. 
+ :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure + default will be created) + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + 
raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if isinstance(exc, ssl.SSLEOFError) or ( + exc.strerror and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. + + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + 
TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() + if self._ssl_object.server_side + else None, + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. + + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + """ + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the + ``anyio.streams.tls`` logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. + logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. 
import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } diff --git a/lib/python3.11/site-packages/anyio/to_interpreter.py b/lib/python3.11/site-packages/anyio/to_interpreter.py new file mode 100644 index 00000000..bcde24d3 --- /dev/null +++ b/lib/python3.11/site-packages/anyio/to_interpreter.py @@ -0,0 +1,218 @@ +from __future__ import annotations + +import atexit +import os +import pickle +import sys +from collections import deque +from collections.abc import Callable +from textwrap import dedent +from typing import Any, Final, TypeVar + +from . 
import current_time, to_thread +from ._core._exceptions import BrokenWorkerIntepreter +from ._core._synchronization import CapacityLimiter +from .lowlevel import RunVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +UNBOUND: Final = 2 # I have no clue how this works, but it was used in the stdlib +FMT_UNPICKLED: Final = 0 +FMT_PICKLED: Final = 1 +DEFAULT_CPU_COUNT: Final = 8 # this is just an arbitrarily selected value +MAX_WORKER_IDLE_TIME = ( + 30 # seconds a subinterpreter can be idle before becoming eligible for pruning +) + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_idle_workers = RunVar[deque["Worker"]]("_available_workers") +_default_interpreter_limiter = RunVar[CapacityLimiter]("_default_interpreter_limiter") + + +class Worker: + _run_func = compile( + dedent(""" + import _interpqueues as queues + import _interpreters as interpreters + from pickle import loads, dumps, HIGHEST_PROTOCOL + + item = queues.get(queue_id)[0] + try: + func, args = loads(item) + retval = func(*args) + except BaseException as exc: + is_exception = True + retval = exc + else: + is_exception = False + + try: + queues.put(queue_id, (retval, is_exception), FMT_UNPICKLED, UNBOUND) + except interpreters.NotShareableError: + retval = dumps(retval, HIGHEST_PROTOCOL) + queues.put(queue_id, (retval, is_exception), FMT_PICKLED, UNBOUND) + """), + "", + "exec", + ) + + last_used: float = 0 + + _initialized: bool = False + _interpreter_id: int + _queue_id: int + + def initialize(self) -> None: + import _interpqueues as queues + import _interpreters as interpreters + + self._interpreter_id = interpreters.create() + self._queue_id = queues.create(2, FMT_UNPICKLED, UNBOUND) # type: ignore[call-arg] + self._initialized = True + interpreters.set___main___attrs( + self._interpreter_id, + { + "queue_id": self._queue_id, + "FMT_PICKLED": FMT_PICKLED, + "FMT_UNPICKLED": 
FMT_UNPICKLED, + "UNBOUND": UNBOUND, + }, + ) + + def destroy(self) -> None: + import _interpqueues as queues + import _interpreters as interpreters + + if self._initialized: + interpreters.destroy(self._interpreter_id) + queues.destroy(self._queue_id) + + def _call( + self, + func: Callable[..., T_Retval], + args: tuple[Any], + ) -> tuple[Any, bool]: + import _interpqueues as queues + import _interpreters as interpreters + + if not self._initialized: + self.initialize() + + payload = pickle.dumps((func, args), pickle.HIGHEST_PROTOCOL) + queues.put(self._queue_id, payload, FMT_PICKLED, UNBOUND) # type: ignore[call-arg] + + res: Any + is_exception: bool + if exc_info := interpreters.exec(self._interpreter_id, self._run_func): # type: ignore[func-returns-value,arg-type] + raise BrokenWorkerIntepreter(exc_info) + + (res, is_exception), fmt = queues.get(self._queue_id)[:2] + if fmt == FMT_PICKLED: + res = pickle.loads(res) + + return res, is_exception + + async def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any], + limiter: CapacityLimiter, + ) -> T_Retval: + result, is_exception = await to_thread.run_sync( + self._call, + func, + args, + limiter=limiter, + ) + if is_exception: + raise result + + return result + + +def _stop_workers(workers: deque[Worker]) -> None: + for worker in workers: + worker.destroy() + + workers.clear() + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a subinterpreter. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the call will still run its course but its return value (or any raised + exception) will be ignored. + + .. warning:: This feature is **experimental**. The upstream interpreter API has not + yet been finalized or thoroughly tested, so don't rely on this for anything + mission critical. 
+ + :param func: a callable + :param args: positional arguments for the callable + :param limiter: capacity limiter to use to limit the total amount of subinterpreters + running (if omitted, the default limiter is used) + :return: the result of the call + :raises BrokenWorkerIntepreter: if there's an internal error in a subinterpreter + + """ + if sys.version_info <= (3, 13): + raise RuntimeError("subinterpreters require at least Python 3.13") + + if limiter is None: + limiter = current_default_interpreter_limiter() + + try: + idle_workers = _idle_workers.get() + except LookupError: + idle_workers = deque() + _idle_workers.set(idle_workers) + atexit.register(_stop_workers, idle_workers) + + async with limiter: + try: + worker = idle_workers.pop() + except IndexError: + worker = Worker() + + try: + return await worker.call(func, args, limiter) + finally: + # Prune workers that have been idle for too long + now = current_time() + while idle_workers: + if now - idle_workers[0].last_used <= MAX_WORKER_IDLE_TIME: + break + + await to_thread.run_sync(idle_workers.popleft().destroy, limiter=limiter) + + worker.last_used = current_time() + idle_workers.append(worker) + + +def current_default_interpreter_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrently running subinterpreters. + + Defaults to the number of CPU cores. 
+ + :return: a capacity limiter object + + """ + try: + return _default_interpreter_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or DEFAULT_CPU_COUNT) + _default_interpreter_limiter.set(limiter) + return limiter diff --git a/lib/python3.11/site-packages/anyio/to_process.py b/lib/python3.11/site-packages/anyio/to_process.py new file mode 100644 index 00000000..495de2ae --- /dev/null +++ b/lib/python3.11/site-packages/anyio/to_process.py @@ -0,0 +1,258 @@ +from __future__ import annotations + +import os +import pickle +import subprocess +import sys +from collections import deque +from collections.abc import Callable +from importlib.util import module_from_spec, spec_from_file_location +from typing import TypeVar, cast + +from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") +_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( + "_process_pool_idle_workers" +) +_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") + + +async def run_sync( # type: ignore[return] + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + cancellable: bool = False, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function 
with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the worker process running it will be abruptly terminated using SIGKILL + (or ``terminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's + running + :param limiter: capacity limiter to use to limit the total amount of processes + running (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + 
get_async_backend().setup_process_pool_exit_at_shutdown(workers) + + async with limiter or current_default_process_limiter(): + # Pop processes from the pool (starting from the most recently used) until we + # find one that hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME + # seconds or longer + now = current_time() + killed_processes: list[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process_to_kill, idle_since = idle_workers.popleft() + process_to_kill.kill() + workers.remove(process_to_kill) + killed_processes.append(process_to_kill) + + with CancelScope(shield=True): + for killed_process in killed_processes: + await killed_process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled = pickle.dumps( + ("init", sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not 
cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of worker + processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: str | None + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path and os.path.isfile(main_module_path): + # Load the parent's main module but as __mp_main__ instead of + # __main__ (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + except BaseException as exc: + exception = exc + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + 
pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/lib/python3.11/site-packages/anyio/to_thread.py b/lib/python3.11/site-packages/anyio/to_thread.py new file mode 100644 index 00000000..5070516e --- /dev/null +++ b/lib/python3.11/site-packages/anyio/to_thread.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import sys +from collections.abc import Callable +from typing import TypeVar +from warnings import warn + +from ._core._eventloop import get_async_backend +from .abc import CapacityLimiter + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + abandon_on_cancel: bool = False, + cancellable: bool | None = None, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. 
+ + :param func: a callable + :param args: positional arguments for the callable + :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run + unchecked on own) if the host task is cancelled, ``False`` to ignore + cancellations in the host task until the operation has completed in the worker + thread + :param cancellable: deprecated alias of ``abandon_on_cancel``; will override + ``abandon_on_cancel`` if both parameters are passed + :param limiter: capacity limiter to use to limit the total amount of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + if cancellable is not None: + abandon_on_cancel = cancellable + warn( + "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " + "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", + DeprecationWarning, + stacklevel=2, + ) + + return await get_async_backend().run_sync_in_worker_thread( + func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter + ) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrent threads. 
+ + :return: a capacity limiter object + + """ + return get_async_backend().current_default_thread_limiter() diff --git a/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/INSTALLER b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/METADATA b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/METADATA new file mode 100644 index 00000000..a2681d72 --- /dev/null +++ b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/METADATA @@ -0,0 +1,122 @@ +Metadata-Version: 2.1 +Name: beautifulsoup4 +Version: 4.12.3 +Summary: Screen-scraping library +Project-URL: Download, https://www.crummy.com/software/BeautifulSoup/bs4/download/ +Project-URL: Homepage, https://www.crummy.com/software/BeautifulSoup/bs4/ +Author-email: Leonard Richardson +License: MIT License +License-File: AUTHORS +License-File: LICENSE +Keywords: HTML,XML,parse,soup +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Topic :: Text Processing :: Markup :: SGML +Classifier: Topic :: Text Processing :: Markup :: XML +Requires-Python: >=3.6.0 +Requires-Dist: soupsieve>1.2 +Provides-Extra: cchardet +Requires-Dist: cchardet; extra == 'cchardet' +Provides-Extra: chardet +Requires-Dist: chardet; extra == 'chardet' +Provides-Extra: charset-normalizer +Requires-Dist: charset-normalizer; extra == 'charset-normalizer' +Provides-Extra: html5lib +Requires-Dist: html5lib; extra == 'html5lib' +Provides-Extra: lxml 
+Requires-Dist: lxml; extra == 'lxml' +Description-Content-Type: text/markdown + +Beautiful Soup is a library that makes it easy to scrape information +from web pages. It sits atop an HTML or XML parser, providing Pythonic +idioms for iterating, searching, and modifying the parse tree. + +# Quick start + +``` +>>> from bs4 import BeautifulSoup +>>> soup = BeautifulSoup("

SomebadHTML") +>>> print(soup.prettify()) + + +

+ Some + + bad + + HTML + + +

+ + +>>> soup.find(text="bad") +'bad' +>>> soup.i +HTML +# +>>> soup = BeautifulSoup("SomebadXML", "xml") +# +>>> print(soup.prettify()) + + + Some + + bad + + XML + + +``` + +To go beyond the basics, [comprehensive documentation is available](https://www.crummy.com/software/BeautifulSoup/bs4/doc/). + +# Links + +* [Homepage](https://www.crummy.com/software/BeautifulSoup/bs4/) +* [Documentation](https://www.crummy.com/software/BeautifulSoup/bs4/doc/) +* [Discussion group](https://groups.google.com/group/beautifulsoup/) +* [Development](https://code.launchpad.net/beautifulsoup/) +* [Bug tracker](https://bugs.launchpad.net/beautifulsoup/) +* [Complete changelog](https://bazaar.launchpad.net/~leonardr/beautifulsoup/bs4/view/head:/CHANGELOG) + +# Note on Python 2 sunsetting + +Beautiful Soup's support for Python 2 was discontinued on December 31, +2020: one year after the sunset date for Python 2 itself. From this +point onward, new Beautiful Soup development will exclusively target +Python 3. The final release of Beautiful Soup 4 to support Python 2 +was 4.9.3. + +# Supporting the project + +If you use Beautiful Soup as part of your professional work, please consider a +[Tidelift subscription](https://tidelift.com/subscription/pkg/pypi-beautifulsoup4?utm_source=pypi-beautifulsoup4&utm_medium=referral&utm_campaign=readme). +This will support many of the free software projects your organization +depends on, not just Beautiful Soup. + +If you use Beautiful Soup for personal projects, the best way to say +thank you is to read +[Tool Safety](https://www.crummy.com/software/BeautifulSoup/zine/), a zine I +wrote about what Beautiful Soup has taught me about software +development. + +# Building the documentation + +The bs4/doc/ directory contains full documentation in Sphinx +format. Run `make html` in that directory to create HTML +documentation. 
+ +# Running the unit tests + +Beautiful Soup supports unit test discovery using Pytest: + +``` +$ pytest +``` + diff --git a/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/RECORD b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/RECORD new file mode 100644 index 00000000..262cd9cd --- /dev/null +++ b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/RECORD @@ -0,0 +1,79 @@ +beautifulsoup4-4.12.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +beautifulsoup4-4.12.3.dist-info/METADATA,sha256=UkOS1koIjlakIy9Q1u2yCNwDEFOUZSrLcsbV-mTInz4,3790 +beautifulsoup4-4.12.3.dist-info/RECORD,, +beautifulsoup4-4.12.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +beautifulsoup4-4.12.3.dist-info/WHEEL,sha256=mRYSEL3Ih6g5a_CVMIcwiF__0Ae4_gLYh01YFNwiq1k,87 +beautifulsoup4-4.12.3.dist-info/licenses/AUTHORS,sha256=uSIdbrBb1sobdXl7VrlUvuvim2dN9kF3MH4Edn0WKGE,2176 +beautifulsoup4-4.12.3.dist-info/licenses/LICENSE,sha256=VbTY1LHlvIbRDvrJG3TIe8t3UmsPW57a-LnNKtxzl7I,1441 +bs4/__init__.py,sha256=kq32cCtQiNjjU9XwjD0b1jdXN5WEC87nJqSSW3PhVkM,33822 +bs4/__pycache__/__init__.cpython-311.pyc,, +bs4/__pycache__/css.cpython-311.pyc,, +bs4/__pycache__/dammit.cpython-311.pyc,, +bs4/__pycache__/diagnose.cpython-311.pyc,, +bs4/__pycache__/element.cpython-311.pyc,, +bs4/__pycache__/formatter.cpython-311.pyc,, +bs4/builder/__init__.py,sha256=nwb35ftjcwzOs2WkjVm1zvfi7FxSyJP-nN1YheIVT14,24566 +bs4/builder/__pycache__/__init__.cpython-311.pyc,, +bs4/builder/__pycache__/_html5lib.cpython-311.pyc,, +bs4/builder/__pycache__/_htmlparser.cpython-311.pyc,, +bs4/builder/__pycache__/_lxml.cpython-311.pyc,, +bs4/builder/_html5lib.py,sha256=0w-hmPM5wWR2iDuRCR6MvY6ZPXbg_hgddym-YWqj03s,19114 +bs4/builder/_htmlparser.py,sha256=_VD5Z08j6A9YYMR4y7ZTfdMzwiCBsSUQAPuHiYB-WZI,14923 +bs4/builder/_lxml.py,sha256=yKdMx1kdX7H2CopwSWEYm4Sgrfkd-WDj8HbskcaLauU,14948 +bs4/css.py,sha256=gqGaHRrKeCRF3gDqxzeU0uclOCeSsTpuW9gUaSnJeWc,10077 
+bs4/dammit.py,sha256=G0cQfsEqfwJ-FIQMkXgCJwSHMn7t9vPepCrud6fZEKk,41158 +bs4/diagnose.py,sha256=uAwdDugL_67tB-BIwDIFLFbiuzGxP2wQzJJ4_bGYUrA,7195 +bs4/element.py,sha256=Dsol2iehkSjk10GzYgwFyjUEgpqmYZpyaAmbL0rWM2w,92845 +bs4/formatter.py,sha256=Bu4utAQYT9XDJaPPpTRM-dyxJDVLdxf_as-IU5gSY8A,7188 +bs4/tests/__init__.py,sha256=NydTegds_r7MoOEuQLS6TFmTA9TwK3KxJhwEkqjCGTQ,48392 +bs4/tests/__pycache__/__init__.cpython-311.pyc,, +bs4/tests/__pycache__/test_builder.cpython-311.pyc,, +bs4/tests/__pycache__/test_builder_registry.cpython-311.pyc,, +bs4/tests/__pycache__/test_css.cpython-311.pyc,, +bs4/tests/__pycache__/test_dammit.cpython-311.pyc,, +bs4/tests/__pycache__/test_docs.cpython-311.pyc,, +bs4/tests/__pycache__/test_element.cpython-311.pyc,, +bs4/tests/__pycache__/test_formatter.cpython-311.pyc,, +bs4/tests/__pycache__/test_fuzz.cpython-311.pyc,, +bs4/tests/__pycache__/test_html5lib.cpython-311.pyc,, +bs4/tests/__pycache__/test_htmlparser.cpython-311.pyc,, +bs4/tests/__pycache__/test_lxml.cpython-311.pyc,, +bs4/tests/__pycache__/test_navigablestring.cpython-311.pyc,, +bs4/tests/__pycache__/test_pageelement.cpython-311.pyc,, +bs4/tests/__pycache__/test_soup.cpython-311.pyc,, +bs4/tests/__pycache__/test_tag.cpython-311.pyc,, +bs4/tests/__pycache__/test_tree.cpython-311.pyc,, +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4670634698080256.testcase,sha256=yUdXkbpNK7LVOQ0LBHMoqZ1rWaBfSXWytoO_xdSm7Ho,15 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4818336571064320.testcase,sha256=Uv_dx4a43TSfoNkjU-jHW2nSXkqHFg4XdAw7SWVObUk,23 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4999465949331456.testcase,sha256=OEyVA0Ej4FxswOElrUNt0In4s4YhrmtaxE_NHGZvGtg,30 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5000587759190016.testcase,sha256=G4vpNBOz-RwMpi6ewEgNEa13zX0sXhmL7VHOyIcdKVQ,15347 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5167584867909632.testcase,sha256=3d8z65o4p7Rur-RmCHoOjzqaYQ8EAtjmiBYTHNyAdl4,19469 
+bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5270998950477824.testcase,sha256=NfGIlit1k40Ip3mlnBkYOkIDJX6gHtjlErwl7gsBjAQ,12 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5375146639360000.testcase,sha256=xy4i1U0nhFHcnyc5pRKS6JRMvuoCNUur-Scor6UxIGw,4317 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5492400320282624.testcase,sha256=Q-UTYpQBUsWoMgIUspUlzveSI-41s4ABC3jajRb-K0o,11502 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912.testcase,sha256=2bq3S8KxZgk8EajLReHD8m4_0Lj_nrkyJAxB_z_U0D0,5 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5843991618256896.testcase,sha256=MZDu31LPLfgu6jP9IZkrlwNes3f_sL8WFP5BChkUKdY,35 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5984173902397440.testcase,sha256=w58r-s6besG5JwPXpnz37W2YTj9-_qxFbk6hiEnKeIQ,51495 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6124268085182464.testcase,sha256=q8rkdMECEXKcqVhOf5zWHkSBTQeOPt0JiLg2TZiPCuk,10380 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6241471367348224.testcase,sha256=QfzoOxKwNuqG-4xIrea6MOQLXhfAAOQJ0r9u-J6kSNs,19 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6306874195312640.testcase,sha256=MJ2pHFuuCQUiQz1Kor2sof7LWeRERQ6QK43YNqQHg9o,47 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400.testcase,sha256=EItOpSdeD4ewK-qgJ9vtxennwn_huguzXgctrUT7fqE,3546 +bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6600557255327744.testcase,sha256=a2aJTG4FceGSJXsjtxoS8S4jk_8rZsS3aznLkeO2_dY,124 +bs4/tests/fuzz/crash-0d306a50c8ed8bcd0785b67000fcd5dea1d33f08.testcase,sha256=jRFRtCKlP3-3EDLc_iVRTcE6JNymv0rYcVM6qRaPrxI,2607 +bs4/tests/fuzz/crash-ffbdfa8a2b26f13537b68d3794b0478a4090ee4a.testcase,sha256=7NsdCiXWAhNkmoW1pvF7rbZExyLAQIWtDtSHXIsH6YU,103 +bs4/tests/test_builder.py,sha256=nc2JE5EMrEf-p24qhf2R8qAV5PpFiOuNpYCmtmCjlTI,1115 +bs4/tests/test_builder_registry.py,sha256=7WLj2prjSHGphebnrjQuI6JYr03Uy_c9_CkaFSQ9HRo,5114 
+bs4/tests/test_css.py,sha256=jCcgIWem3lyPa5AjhAk9S6fWI07hk1rg0v8coD7bEtI,17279 +bs4/tests/test_dammit.py,sha256=MbSmRN6VEP0Rm56-w6Ja0TW8eC-8ZxOJ-wXWVf_hRi8,15451 +bs4/tests/test_docs.py,sha256=xoAxnUfoQ7aRqGImwW_9BJDU8WNMZHIuvWqVepvWXt8,1127 +bs4/tests/test_element.py,sha256=92oRSRoGk8gIXAbAGHErKzocx2MK32TqcQdUJ-dGQMo,2377 +bs4/tests/test_formatter.py,sha256=eTzj91Lmhv90z-WiHjK3sBJZm0hRk0crFY1TZaXstCY,4148 +bs4/tests/test_fuzz.py,sha256=_K2utiYVkZ22mvh03g8CBioFU1QDJaff1vTaDyXhxNk,6972 +bs4/tests/test_html5lib.py,sha256=2-ipm-_MaPt37WTxEd5DodUTNhS4EbLFKPRaO6XSCW4,8322 +bs4/tests/test_htmlparser.py,sha256=wnngcIlzjEwH21JFfu_mgt6JdpLt0ncJfLcGT7HeGw0,6256 +bs4/tests/test_lxml.py,sha256=nQCmLt7bWk0id7xMumZw--PzEe1xF9PTQn3lvHyNC6I,7635 +bs4/tests/test_navigablestring.py,sha256=RGSgziNf7cZnYdEPsoqL1B2I68TUJp1JmEQVxbh_ryA,5081 +bs4/tests/test_pageelement.py,sha256=VdGjUxx3RhjqmNsJ92ao6VZC_YD7T8mdLkDZjosOYeE,14274 +bs4/tests/test_soup.py,sha256=JmnAPLE1_GXm0wmwEUN7icdvBz9HDch-qoU2mT_TDrs,19877 +bs4/tests/test_tag.py,sha256=FBPDUisDCbFmvl5HmTtN49CGo3YoUXh5Wiuw5FMLS5E,9616 +bs4/tests/test_tree.py,sha256=n9nTQOzJb3-ZnZ6AkmMdZQ5TYcTUPnqHoVgal0mYXfg,48129 diff --git a/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/REQUESTED b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/WHEEL b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/WHEEL new file mode 100644 index 00000000..2860816a --- /dev/null +++ b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/AUTHORS b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/AUTHORS new file mode 100644 index 00000000..1f14fe07 --- /dev/null +++ 
b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/AUTHORS
@@ -0,0 +1,49 @@
+Behold, mortal, the origins of Beautiful Soup...
+================================================
+
+Leonard Richardson is the primary maintainer.
+
+Aaron DeVore and Isaac Muse have made significant contributions to the
+code base.
+
+Mark Pilgrim provided the encoding detection code that forms the base
+of UnicodeDammit.
+
+Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
+Soup 4 working under Python 3.
+
+Simon Willison wrote soupselect, which was used to make Beautiful Soup
+support CSS selectors. Isaac Muse wrote SoupSieve, which made it
+possible to _remove_ the CSS selector code from Beautiful Soup.
+
+Sam Ruby helped with a lot of edge cases.
+
+Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
+work in solving the nestable tags conundrum.
+
+An incomplete list of people have contributed patches to Beautiful
+Soup:
+
+ Istvan Albert, Andrew Lin, Anthony Baxter, Oliver Beattie, Andrew
+Boyko, Tony Chang, Francisco Canas, "Delong", Zephyr Fang, Fuzzy,
+Roman Gaufman, Yoni Gilad, Richie Hindle, Toshihiro Kamiya, Peteris
+Krumins, Kent Johnson, Marek Kapolka, Andreas Kostyrka, Roel Kramer,
+Ben Last, Robert Leftwich, Stefaan Lippens, "liquider", Staffan
+Malmgren, Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon",
+Ed Oskiewicz, Martijn Peters, Greg Phillips, Giles Radford, Stefano
+Revera, Arthur Rudolph, Marko Samastur, James Salter, Jouni Seppänen,
+Alexander Schmolck, Tim Shirley, Geoffrey Sneddon, Ville Skyttä,
+"Vikas", Jens Svalgaard, Andy Theyers, Eric Weiser, Glyn Webster, John
+Wiseman, Paul Wright, Danny Yoo
+
+An incomplete list of people who made suggestions or found bugs or
+found ways to break Beautiful Soup:
+
+ Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
+ Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
+ Matt Patterson, Scott Roberts, Steve Strassmann, Mike
Williams, + warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison, + Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed + Summers, Dennis Sutch, Chris Smith, Aaron Swartz, Stuart + Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de + Sousa Rocha, Yichun Wei, Per Vognsen diff --git a/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/LICENSE b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/LICENSE new file mode 100644 index 00000000..08e3a9cf --- /dev/null +++ b/lib/python3.11/site-packages/beautifulsoup4-4.12.3.dist-info/licenses/LICENSE @@ -0,0 +1,31 @@ +Beautiful Soup is made available under the MIT license: + + Copyright (c) Leonard Richardson + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +Beautiful Soup incorporates code from the html5lib library, which is +also made available under the MIT license. 
Copyright (c) James Graham +and other contributors + +Beautiful Soup has an optional dependency on the soupsieve library, +which is also made available under the MIT license. Copyright (c) +Isaac Muse diff --git a/lib/python3.11/site-packages/bs4/__init__.py b/lib/python3.11/site-packages/bs4/__init__.py new file mode 100644 index 00000000..d8ad5e1d --- /dev/null +++ b/lib/python3.11/site-packages/bs4/__init__.py @@ -0,0 +1,840 @@ +"""Beautiful Soup Elixir and Tonic - "The Screen-Scraper's Friend". + +http://www.crummy.com/software/BeautifulSoup/ + +Beautiful Soup uses a pluggable XML or HTML parser to parse a +(possibly invalid) document into a tree representation. Beautiful Soup +provides methods and Pythonic idioms that make it easy to navigate, +search, and modify the parse tree. + +Beautiful Soup works with Python 3.6 and up. It works better if lxml +and/or html5lib is installed. + +For more than you ever wanted to know about Beautiful Soup, see the +documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ +""" + +__author__ = "Leonard Richardson (leonardr@segfault.org)" +__version__ = "4.12.3" +__copyright__ = "Copyright (c) 2004-2024 Leonard Richardson" +# Use of this source code is governed by the MIT license. +__license__ = "MIT" + +__all__ = ['BeautifulSoup'] + +from collections import Counter +import os +import re +import sys +import traceback +import warnings + +# The very first thing we do is give a useful error if someone is +# running this code under Python 2. +if sys.version_info.major < 3: + raise ImportError('You are trying to use a Python 3-specific version of Beautiful Soup under Python 2. This will not work. 
The final version of Beautiful Soup to support Python 2 was 4.9.3.') + +from .builder import ( + builder_registry, + ParserRejectedMarkup, + XMLParsedAsHTMLWarning, + HTMLParserTreeBuilder +) +from .dammit import UnicodeDammit +from .element import ( + CData, + Comment, + CSS, + DEFAULT_OUTPUT_ENCODING, + Declaration, + Doctype, + NavigableString, + PageElement, + ProcessingInstruction, + PYTHON_SPECIFIC_ENCODINGS, + ResultSet, + Script, + Stylesheet, + SoupStrainer, + Tag, + TemplateString, + ) + +# Define some custom warnings. +class GuessedAtParserWarning(UserWarning): + """The warning issued when BeautifulSoup has to guess what parser to + use -- probably because no parser was specified in the constructor. + """ + +class MarkupResemblesLocatorWarning(UserWarning): + """The warning issued when BeautifulSoup is given 'markup' that + actually looks like a resource locator -- a URL or a path to a file + on disk. + """ + + +class BeautifulSoup(Tag): + """A data structure representing a parsed HTML or XML document. + + Most of the methods you'll call on a BeautifulSoup object are inherited from + PageElement or Tag. + + Internally, this class defines the basic interface called by the + tree builders when converting an HTML/XML document into a data + structure. The interface abstracts away the differences between + parsers. To write a new tree builder, you'll need to understand + these methods as a whole. + + These methods will be called by the BeautifulSoup constructor: + * reset() + * feed(markup) + + The tree builder may call these methods from its feed() implementation: + * handle_starttag(name, attrs) # See note about return value + * handle_endtag(name) + * handle_data(data) # Appends to the current data node + * endData(containerClass) # Ends the current data node + + No matter how complicated the underlying parser is, you should be + able to build a tree using 'start tag' events, 'end tag' events, + 'data' events, and "done with data" events. 
+
+    If you encounter an empty-element tag (aka a self-closing tag,
+    like HTML's <br/>
tag), call handle_starttag and then + handle_endtag. + """ + + # Since BeautifulSoup subclasses Tag, it's possible to treat it as + # a Tag with a .name. This name makes it clear the BeautifulSoup + # object isn't a real markup tag. + ROOT_TAG_NAME = '[document]' + + # If the end-user gives no indication which tree builder they + # want, look for one with these features. + DEFAULT_BUILDER_FEATURES = ['html', 'fast'] + + # A string containing all ASCII whitespace characters, used in + # endData() to detect data chunks that seem 'empty'. + ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' + + NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, pass the additional argument 'features=\"%(parser)s\"' to the BeautifulSoup constructor.\n" + + def __init__(self, markup="", features=None, builder=None, + parse_only=None, from_encoding=None, exclude_encodings=None, + element_classes=None, **kwargs): + """Constructor. + + :param markup: A string or a file-like object representing + markup to be parsed. + + :param features: Desirable features of the parser to be + used. This may be the name of a specific parser ("lxml", + "lxml-xml", "html.parser", or "html5lib") or it may be the + type of markup to be used ("html", "html5", "xml"). It's + recommended that you name a specific parser, so that + Beautiful Soup gives you the same results across platforms + and virtual environments. + + :param builder: A TreeBuilder subclass to instantiate (or + instance to use) instead of looking one up based on + `features`. You only need to use this if you've implemented a + custom TreeBuilder. 
+ + :param parse_only: A SoupStrainer. Only parts of the document + matching the SoupStrainer will be considered. This is useful + when parsing part of a document that would otherwise be too + large to fit into memory. + + :param from_encoding: A string indicating the encoding of the + document to be parsed. Pass this in if Beautiful Soup is + guessing wrongly about the document's encoding. + + :param exclude_encodings: A list of strings indicating + encodings known to be wrong. Pass this in if you don't know + the document's encoding but you know Beautiful Soup's guess is + wrong. + + :param element_classes: A dictionary mapping BeautifulSoup + classes like Tag and NavigableString, to other classes you'd + like to be instantiated instead as the parse tree is + built. This is useful for subclassing Tag or NavigableString + to modify default behavior. + + :param kwargs: For backwards compatibility purposes, the + constructor accepts certain keyword arguments used in + Beautiful Soup 3. None of these arguments do anything in + Beautiful Soup 4; they will result in a warning and then be + ignored. + + Apart from this, any keyword arguments passed into the + BeautifulSoup constructor are propagated to the TreeBuilder + constructor. This makes it possible to configure a + TreeBuilder by passing in arguments, not just by saying which + one to use. + """ + if 'convertEntities' in kwargs: + del kwargs['convertEntities'] + warnings.warn( + "BS4 does not respect the convertEntities argument to the " + "BeautifulSoup constructor. Entities are always converted " + "to Unicode characters.") + + if 'markupMassage' in kwargs: + del kwargs['markupMassage'] + warnings.warn( + "BS4 does not respect the markupMassage argument to the " + "BeautifulSoup constructor. 
The tree builder is responsible " + "for any necessary markup massage.") + + if 'smartQuotesTo' in kwargs: + del kwargs['smartQuotesTo'] + warnings.warn( + "BS4 does not respect the smartQuotesTo argument to the " + "BeautifulSoup constructor. Smart quotes are always converted " + "to Unicode characters.") + + if 'selfClosingTags' in kwargs: + del kwargs['selfClosingTags'] + warnings.warn( + "BS4 does not respect the selfClosingTags argument to the " + "BeautifulSoup constructor. The tree builder is responsible " + "for understanding self-closing tags.") + + if 'isHTML' in kwargs: + del kwargs['isHTML'] + warnings.warn( + "BS4 does not respect the isHTML argument to the " + "BeautifulSoup constructor. Suggest you use " + "features='lxml' for HTML and features='lxml-xml' for " + "XML.") + + def deprecated_argument(old_name, new_name): + if old_name in kwargs: + warnings.warn( + 'The "%s" argument to the BeautifulSoup constructor ' + 'has been renamed to "%s."' % (old_name, new_name), + DeprecationWarning, stacklevel=3 + ) + return kwargs.pop(old_name) + return None + + parse_only = parse_only or deprecated_argument( + "parseOnlyThese", "parse_only") + + from_encoding = from_encoding or deprecated_argument( + "fromEncoding", "from_encoding") + + if from_encoding and isinstance(markup, str): + warnings.warn("You provided Unicode markup but also provided a value for from_encoding. Your from_encoding will be ignored.") + from_encoding = None + + self.element_classes = element_classes or dict() + + # We need this information to track whether or not the builder + # was specified well enough that we can omit the 'you need to + # specify a parser' warning. + original_builder = builder + original_features = features + + if isinstance(builder, type): + # A builder class was passed in; it needs to be instantiated. 
+ builder_class = builder + builder = None + elif builder is None: + if isinstance(features, str): + features = [features] + if features is None or len(features) == 0: + features = self.DEFAULT_BUILDER_FEATURES + builder_class = builder_registry.lookup(*features) + if builder_class is None: + raise FeatureNotFound( + "Couldn't find a tree builder with the features you " + "requested: %s. Do you need to install a parser library?" + % ",".join(features)) + + # At this point either we have a TreeBuilder instance in + # builder, or we have a builder_class that we can instantiate + # with the remaining **kwargs. + if builder is None: + builder = builder_class(**kwargs) + if not original_builder and not ( + original_features == builder.NAME or + original_features in builder.ALTERNATE_NAMES + ) and markup: + # The user did not tell us which TreeBuilder to use, + # and we had to guess. Issue a warning. + if builder.is_xml: + markup_type = "XML" + else: + markup_type = "HTML" + + # This code adapted from warnings.py so that we get the same line + # of code as our warnings.warn() call gets, even if the answer is wrong + # (as it may be in a multithreading situation). + caller = None + try: + caller = sys._getframe(1) + except ValueError: + pass + if caller: + globals = caller.f_globals + line_number = caller.f_lineno + else: + globals = sys.__dict__ + line_number= 1 + filename = globals.get('__file__') + if filename: + fnl = filename.lower() + if fnl.endswith((".pyc", ".pyo")): + filename = filename[:-1] + if filename: + # If there is no filename at all, the user is most likely in a REPL, + # and the warning is not necessary. + values = dict( + filename=filename, + line_number=line_number, + parser=builder.NAME, + markup_type=markup_type + ) + warnings.warn( + self.NO_PARSER_SPECIFIED_WARNING % values, + GuessedAtParserWarning, stacklevel=2 + ) + else: + if kwargs: + warnings.warn("Keyword arguments to the BeautifulSoup constructor will be ignored. 
These would normally be passed into the TreeBuilder constructor, but a TreeBuilder instance was passed in as `builder`.") + + self.builder = builder + self.is_xml = builder.is_xml + self.known_xml = self.is_xml + self._namespaces = dict() + self.parse_only = parse_only + + if hasattr(markup, 'read'): # It's a file-type object. + markup = markup.read() + elif len(markup) <= 256 and ( + (isinstance(markup, bytes) and not b'<' in markup) + or (isinstance(markup, str) and not '<' in markup) + ): + # Issue warnings for a couple beginner problems + # involving passing non-markup to Beautiful Soup. + # Beautiful Soup will still parse the input as markup, + # since that is sometimes the intended behavior. + if not self._markup_is_url(markup): + self._markup_resembles_filename(markup) + + rejections = [] + success = False + for (self.markup, self.original_encoding, self.declared_html_encoding, + self.contains_replacement_characters) in ( + self.builder.prepare_markup( + markup, from_encoding, exclude_encodings=exclude_encodings)): + self.reset() + self.builder.initialize_soup(self) + try: + self._feed() + success = True + break + except ParserRejectedMarkup as e: + rejections.append(e) + pass + + if not success: + other_exceptions = [str(e) for e in rejections] + raise ParserRejectedMarkup( + "The markup you provided was rejected by the parser. Trying a different parser or a different encoding may help.\n\nOriginal exception(s) from parser:\n " + "\n ".join(other_exceptions) + ) + + # Clear out the markup and remove the builder's circular + # reference to this object. + self.markup = None + self.builder.soup = None + + def _clone(self): + """Create a new BeautifulSoup object with the same TreeBuilder, + but not associated with any markup. + + This is the first step of the deepcopy process. + """ + clone = type(self)("", None, self.builder) + + # Keep track of the encoding of the original document, + # since we won't be parsing it again. 
+ clone.original_encoding = self.original_encoding + return clone + + def __getstate__(self): + # Frequently a tree builder can't be pickled. + d = dict(self.__dict__) + if 'builder' in d and d['builder'] is not None and not self.builder.picklable: + d['builder'] = type(self.builder) + # Store the contents as a Unicode string. + d['contents'] = [] + d['markup'] = self.decode() + + # If _most_recent_element is present, it's a Tag object left + # over from initial parse. It might not be picklable and we + # don't need it. + if '_most_recent_element' in d: + del d['_most_recent_element'] + return d + + def __setstate__(self, state): + # If necessary, restore the TreeBuilder by looking it up. + self.__dict__ = state + if isinstance(self.builder, type): + self.builder = self.builder() + elif not self.builder: + # We don't know which builder was used to build this + # parse tree, so use a default we know is always available. + self.builder = HTMLParserTreeBuilder() + self.builder.soup = self + self.reset() + self._feed() + return state + + + @classmethod + def _decode_markup(cls, markup): + """Ensure `markup` is bytes so it's safe to send into warnings.warn. + + TODO: warnings.warn had this problem back in 2010 but it might not + anymore. + """ + if isinstance(markup, bytes): + decoded = markup.decode('utf-8', 'replace') + else: + decoded = markup + return decoded + + @classmethod + def _markup_is_url(cls, markup): + """Error-handling method to raise a warning if incoming markup looks + like a URL. + + :param markup: A string. + :return: Whether or not the markup resembles a URL + closely enough to justify a warning. 
+ """ + if isinstance(markup, bytes): + space = b' ' + cant_start_with = (b"http:", b"https:") + elif isinstance(markup, str): + space = ' ' + cant_start_with = ("http:", "https:") + else: + return False + + if any(markup.startswith(prefix) for prefix in cant_start_with): + if not space in markup: + warnings.warn( + 'The input looks more like a URL than markup. You may want to use' + ' an HTTP client like requests to get the document behind' + ' the URL, and feed that document to Beautiful Soup.', + MarkupResemblesLocatorWarning, + stacklevel=3 + ) + return True + return False + + @classmethod + def _markup_resembles_filename(cls, markup): + """Error-handling method to raise a warning if incoming markup + resembles a filename. + + :param markup: A bytestring or string. + :return: Whether or not the markup resembles a filename + closely enough to justify a warning. + """ + path_characters = '/\\' + extensions = ['.html', '.htm', '.xml', '.xhtml', '.txt'] + if isinstance(markup, bytes): + path_characters = path_characters.encode("utf8") + extensions = [x.encode('utf8') for x in extensions] + filelike = False + if any(x in markup for x in path_characters): + filelike = True + else: + lower = markup.lower() + if any(lower.endswith(ext) for ext in extensions): + filelike = True + if filelike: + warnings.warn( + 'The input looks more like a filename than markup. You may' + ' want to open this file and pass the filehandle into' + ' Beautiful Soup.', + MarkupResemblesLocatorWarning, stacklevel=3 + ) + return True + return False + + def _feed(self): + """Internal method that parses previously set markup, creating a large + number of Tag and NavigableString objects. + """ + # Convert the document to Unicode. + self.builder.reset() + + self.builder.feed(self.markup) + # Close out any unfinished strings and close all the open tags. 
+ self.endData() + while self.currentTag.name != self.ROOT_TAG_NAME: + self.popTag() + + def reset(self): + """Reset this object to a state as though it had never parsed any + markup. + """ + Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) + self.hidden = 1 + self.builder.reset() + self.current_data = [] + self.currentTag = None + self.tagStack = [] + self.open_tag_counter = Counter() + self.preserve_whitespace_tag_stack = [] + self.string_container_stack = [] + self._most_recent_element = None + self.pushTag(self) + + def new_tag(self, name, namespace=None, nsprefix=None, attrs={}, + sourceline=None, sourcepos=None, **kwattrs): + """Create a new Tag associated with this BeautifulSoup object. + + :param name: The name of the new Tag. + :param namespace: The URI of the new Tag's XML namespace, if any. + :param prefix: The prefix for the new Tag's XML namespace, if any. + :param attrs: A dictionary of this Tag's attribute values; can + be used instead of `kwattrs` for attributes like 'class' + that are reserved words in Python. + :param sourceline: The line number where this tag was + (purportedly) found in its source document. + :param sourcepos: The character position within `sourceline` where this + tag was (purportedly) found. + :param kwattrs: Keyword arguments for the new Tag's attribute values. + + """ + kwattrs.update(attrs) + return self.element_classes.get(Tag, Tag)( + None, self.builder, name, namespace, nsprefix, kwattrs, + sourceline=sourceline, sourcepos=sourcepos + ) + + def string_container(self, base_class=None): + container = base_class or NavigableString + + # There may be a general override of NavigableString. + container = self.element_classes.get( + container, container + ) + + # On top of that, we may be inside a tag that needs a special + # container class. 
+ if self.string_container_stack and container is NavigableString: + container = self.builder.string_containers.get( + self.string_container_stack[-1].name, container + ) + return container + + def new_string(self, s, subclass=None): + """Create a new NavigableString associated with this BeautifulSoup + object. + """ + container = self.string_container(subclass) + return container(s) + + def insert_before(self, *args): + """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement + it because there is nothing before or after it in the parse tree. + """ + raise NotImplementedError("BeautifulSoup objects don't support insert_before().") + + def insert_after(self, *args): + """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement + it because there is nothing before or after it in the parse tree. + """ + raise NotImplementedError("BeautifulSoup objects don't support insert_after().") + + def popTag(self): + """Internal method called by _popToTag when a tag is closed.""" + tag = self.tagStack.pop() + if tag.name in self.open_tag_counter: + self.open_tag_counter[tag.name] -= 1 + if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: + self.preserve_whitespace_tag_stack.pop() + if self.string_container_stack and tag == self.string_container_stack[-1]: + self.string_container_stack.pop() + #print("Pop", tag.name) + if self.tagStack: + self.currentTag = self.tagStack[-1] + return self.currentTag + + def pushTag(self, tag): + """Internal method called by handle_starttag when a tag is opened.""" + #print("Push", tag.name) + if self.currentTag is not None: + self.currentTag.contents.append(tag) + self.tagStack.append(tag) + self.currentTag = self.tagStack[-1] + if tag.name != self.ROOT_TAG_NAME: + self.open_tag_counter[tag.name] += 1 + if tag.name in self.builder.preserve_whitespace_tags: + self.preserve_whitespace_tag_stack.append(tag) + if tag.name in self.builder.string_containers: + 
self.string_container_stack.append(tag) + + def endData(self, containerClass=None): + """Method called by the TreeBuilder when the end of a data segment + occurs. + """ + if self.current_data: + current_data = ''.join(self.current_data) + # If whitespace is not preserved, and this string contains + # nothing but ASCII spaces, replace it with a single space + # or newline. + if not self.preserve_whitespace_tag_stack: + strippable = True + for i in current_data: + if i not in self.ASCII_SPACES: + strippable = False + break + if strippable: + if '\n' in current_data: + current_data = '\n' + else: + current_data = ' ' + + # Reset the data collector. + self.current_data = [] + + # Should we add this string to the tree at all? + if self.parse_only and len(self.tagStack) <= 1 and \ + (not self.parse_only.text or \ + not self.parse_only.search(current_data)): + return + + containerClass = self.string_container(containerClass) + o = containerClass(current_data) + self.object_was_parsed(o) + + def object_was_parsed(self, o, parent=None, most_recent_element=None): + """Method called by the TreeBuilder to integrate an object into the parse tree.""" + if parent is None: + parent = self.currentTag + if most_recent_element is not None: + previous_element = most_recent_element + else: + previous_element = self._most_recent_element + + next_element = previous_sibling = next_sibling = None + if isinstance(o, Tag): + next_element = o.next_element + next_sibling = o.next_sibling + previous_sibling = o.previous_sibling + if previous_element is None: + previous_element = o.previous_element + + fix = parent.next_element is not None + + o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) + + self._most_recent_element = o + parent.contents.append(o) + + # Check if we are inserting into an already parsed node. 
+ if fix: + self._linkage_fixer(parent) + + def _linkage_fixer(self, el): + """Make sure linkage of this fragment is sound.""" + + first = el.contents[0] + child = el.contents[-1] + descendant = child + + if child is first and el.parent is not None: + # Parent should be linked to first child + el.next_element = child + # We are no longer linked to whatever this element is + prev_el = child.previous_element + if prev_el is not None and prev_el is not el: + prev_el.next_element = None + # First child should be linked to the parent, and no previous siblings. + child.previous_element = el + child.previous_sibling = None + + # We have no sibling as we've been appended as the last. + child.next_sibling = None + + # This index is a tag, dig deeper for a "last descendant" + if isinstance(child, Tag) and child.contents: + descendant = child._last_descendant(False) + + # As the final step, link last descendant. It should be linked + # to the parent's next sibling (if found), else walk up the chain + # and find a parent with a sibling. It should have no next sibling. + descendant.next_element = None + descendant.next_sibling = None + target = el + while True: + if target is None: + break + elif target.next_sibling is not None: + descendant.next_element = target.next_sibling + target.next_sibling.previous_element = child + break + target = target.parent + + def _popToTag(self, name, nsprefix=None, inclusivePop=True): + """Pops the tag stack up to and including the most recent + instance of the given tag. + + If there are no open tags with the given name, nothing will be + popped. + + :param name: Pop up to the most recent tag with this name. + :param nsprefix: The namespace prefix that goes with `name`. + :param inclusivePop: It this is false, pops the tag stack up + to but *not* including the most recent instqance of the + given tag. + + """ + #print("Popping to %s" % name) + if name == self.ROOT_TAG_NAME: + # The BeautifulSoup object itself can never be popped. 
+ return + + most_recently_popped = None + + stack_size = len(self.tagStack) + for i in range(stack_size - 1, 0, -1): + if not self.open_tag_counter.get(name): + break + t = self.tagStack[i] + if (name == t.name and nsprefix == t.prefix): + if inclusivePop: + most_recently_popped = self.popTag() + break + most_recently_popped = self.popTag() + + return most_recently_popped + + def handle_starttag(self, name, namespace, nsprefix, attrs, sourceline=None, + sourcepos=None, namespaces=None): + """Called by the tree builder when a new tag is encountered. + + :param name: Name of the tag. + :param nsprefix: Namespace prefix for the tag. + :param attrs: A dictionary of attribute values. + :param sourceline: The line number where this tag was found in its + source document. + :param sourcepos: The character position within `sourceline` where this + tag was found. + :param namespaces: A dictionary of all namespace prefix mappings + currently in scope in the document. + + If this method returns None, the tag was rejected by an active + SoupStrainer. You should proceed as if the tag had not occurred + in the document. For instance, if this was a self-closing tag, + don't call handle_endtag. + """ + # print("Start tag %s: %s" % (name, attrs)) + self.endData() + + if (self.parse_only and len(self.tagStack) <= 1 + and (self.parse_only.text + or not self.parse_only.search_tag(name, attrs))): + return None + + tag = self.element_classes.get(Tag, Tag)( + self, self.builder, name, namespace, nsprefix, attrs, + self.currentTag, self._most_recent_element, + sourceline=sourceline, sourcepos=sourcepos, + namespaces=namespaces + ) + if tag is None: + return tag + if self._most_recent_element is not None: + self._most_recent_element.next_element = tag + self._most_recent_element = tag + self.pushTag(tag) + return tag + + def handle_endtag(self, name, nsprefix=None): + """Called by the tree builder when an ending tag is encountered. + + :param name: Name of the tag. 
+ :param nsprefix: Namespace prefix for the tag. + """ + #print("End tag: " + name) + self.endData() + self._popToTag(name, nsprefix) + + def handle_data(self, data): + """Called by the tree builder when a chunk of textual data is encountered.""" + self.current_data.append(data) + + def decode(self, pretty_print=False, + eventual_encoding=DEFAULT_OUTPUT_ENCODING, + formatter="minimal", iterator=None): + """Returns a string or Unicode representation of the parse tree + as an HTML or XML document. + + :param pretty_print: If this is True, indentation will be used to + make the document more readable. + :param eventual_encoding: The encoding of the final document. + If this is None, the document will be a Unicode string. + """ + if self.is_xml: + # Print the XML declaration + encoding_part = '' + if eventual_encoding in PYTHON_SPECIFIC_ENCODINGS: + # This is a special Python encoding; it can't actually + # go into an XML document because it means nothing + # outside of Python. + eventual_encoding = None + if eventual_encoding != None: + encoding_part = ' encoding="%s"' % eventual_encoding + prefix = '\n' % encoding_part + else: + prefix = '' + if not pretty_print: + indent_level = None + else: + indent_level = 0 + return prefix + super(BeautifulSoup, self).decode( + indent_level, eventual_encoding, formatter, iterator) + +# Aliases to make it easier to get started quickly, e.g. 'from bs4 import _soup' +_s = BeautifulSoup +_soup = BeautifulSoup + +class BeautifulStoneSoup(BeautifulSoup): + """Deprecated interface to an XML parser.""" + + def __init__(self, *args, **kwargs): + kwargs['features'] = 'xml' + warnings.warn( + 'The BeautifulStoneSoup class is deprecated. 
Instead of using ' + 'it, pass features="xml" into the BeautifulSoup constructor.', + DeprecationWarning, stacklevel=2 + ) + super(BeautifulStoneSoup, self).__init__(*args, **kwargs) + + +class StopParsing(Exception): + """Exception raised by a TreeBuilder if it's unable to continue parsing.""" + pass + +class FeatureNotFound(ValueError): + """Exception raised by the BeautifulSoup constructor if no parser with the + requested features is found. + """ + pass + + +#If this file is run as a script, act as an HTML pretty-printer. +if __name__ == '__main__': + import sys + soup = BeautifulSoup(sys.stdin) + print((soup.prettify())) diff --git a/lib/python3.11/site-packages/bs4/builder/__init__.py b/lib/python3.11/site-packages/bs4/builder/__init__.py new file mode 100644 index 00000000..ffb31fc2 --- /dev/null +++ b/lib/python3.11/site-packages/bs4/builder/__init__.py @@ -0,0 +1,636 @@ +# Use of this source code is governed by the MIT license. +__license__ = "MIT" + +from collections import defaultdict +import itertools +import re +import warnings +import sys +from bs4.element import ( + CharsetMetaAttributeValue, + ContentMetaAttributeValue, + RubyParenthesisString, + RubyTextString, + Stylesheet, + Script, + TemplateString, + nonwhitespace_re +) + +__all__ = [ + 'HTMLTreeBuilder', + 'SAXTreeBuilder', + 'TreeBuilder', + 'TreeBuilderRegistry', + ] + +# Some useful features for a TreeBuilder to have. +FAST = 'fast' +PERMISSIVE = 'permissive' +STRICT = 'strict' +XML = 'xml' +HTML = 'html' +HTML_5 = 'html5' + +class XMLParsedAsHTMLWarning(UserWarning): + """The warning issued when an HTML parser is used to parse + XML that is not XHTML. + """ + MESSAGE = """It looks like you're parsing an XML document using an HTML parser. If this really is an HTML document (maybe it's XHTML?), you can ignore or filter this warning. If it's XML, you should know that using an XML parser will be more reliable. 
To parse this document as XML, make sure you have the lxml package installed, and pass the keyword argument `features="xml"` into the BeautifulSoup constructor.""" + + +class TreeBuilderRegistry(object): + """A way of looking up TreeBuilder subclasses by their name or by desired + features. + """ + + def __init__(self): + self.builders_for_feature = defaultdict(list) + self.builders = [] + + def register(self, treebuilder_class): + """Register a treebuilder based on its advertised features. + + :param treebuilder_class: A subclass of Treebuilder. its .features + attribute should list its features. + """ + for feature in treebuilder_class.features: + self.builders_for_feature[feature].insert(0, treebuilder_class) + self.builders.insert(0, treebuilder_class) + + def lookup(self, *features): + """Look up a TreeBuilder subclass with the desired features. + + :param features: A list of features to look for. If none are + provided, the most recently registered TreeBuilder subclass + will be used. + :return: A TreeBuilder subclass, or None if there's no + registered subclass with all the requested features. + """ + if len(self.builders) == 0: + # There are no builders at all. + return None + + if len(features) == 0: + # They didn't ask for any features. Give them the most + # recently registered builder. + return self.builders[0] + + # Go down the list of features in order, and eliminate any builders + # that don't match every feature. + features = list(features) + features.reverse() + candidates = None + candidate_set = None + while len(features) > 0: + feature = features.pop() + we_have_the_feature = self.builders_for_feature.get(feature, []) + if len(we_have_the_feature) > 0: + if candidates is None: + candidates = we_have_the_feature + candidate_set = set(candidates) + else: + # Eliminate any candidates that don't have this feature. + candidate_set = candidate_set.intersection( + set(we_have_the_feature)) + + # The only valid candidates are the ones in candidate_set. 
+ # Go through the original list of candidates and pick the first one + # that's in candidate_set. + if candidate_set is None: + return None + for candidate in candidates: + if candidate in candidate_set: + return candidate + return None + +# The BeautifulSoup class will take feature lists from developers and use them +# to look up builders in this registry. +builder_registry = TreeBuilderRegistry() + +class TreeBuilder(object): + """Turn a textual document into a Beautiful Soup object tree.""" + + NAME = "[Unknown tree builder]" + ALTERNATE_NAMES = [] + features = [] + + is_xml = False + picklable = False + empty_element_tags = None # A tag will be considered an empty-element + # tag when and only when it has no contents. + + # A value for these tag/attribute combinations is a space- or + # comma-separated list of CDATA, rather than a single CDATA. + DEFAULT_CDATA_LIST_ATTRIBUTES = defaultdict(list) + + # Whitespace should be preserved inside these tags. + DEFAULT_PRESERVE_WHITESPACE_TAGS = set() + + # The textual contents of tags with these names should be + # instantiated with some class other than NavigableString. + DEFAULT_STRING_CONTAINERS = {} + + USE_DEFAULT = object() + + # Most parsers don't keep track of line numbers. + TRACKS_LINE_NUMBERS = False + + def __init__(self, multi_valued_attributes=USE_DEFAULT, + preserve_whitespace_tags=USE_DEFAULT, + store_line_numbers=USE_DEFAULT, + string_containers=USE_DEFAULT, + ): + """Constructor. + + :param multi_valued_attributes: If this is set to None, the + TreeBuilder will not turn any values for attributes like + 'class' into lists. Setting this to a dictionary will + customize this behavior; look at DEFAULT_CDATA_LIST_ATTRIBUTES + for an example. + + Internally, these are called "CDATA list attributes", but that + probably doesn't make sense to an end-user, so the argument name + is `multi_valued_attributes`. + + :param preserve_whitespace_tags: A list of tags to treat + the way
<pre> tags are treated in HTML. Tags in this list
+         are immune from pretty-printing; their contents will always be
+         output as-is.
+
+        :param string_containers: A dictionary mapping tag names to
+        the classes that should be instantiated to contain the textual
+        contents of those tags. The default is to use NavigableString
+        for every tag, no matter what the name. You can override the
+        default by changing DEFAULT_STRING_CONTAINERS.
+
+        :param store_line_numbers: If the parser keeps track of the
+         line numbers and positions of the original markup, that
+         information will, by default, be stored in each corresponding
+         `Tag` object. You can turn this off by passing
+         store_line_numbers=False. If the parser you're using doesn't 
+         keep track of this information, then setting store_line_numbers=True
+         will do nothing.
+        """
+        self.soup = None
+        if multi_valued_attributes is self.USE_DEFAULT:
+            multi_valued_attributes = self.DEFAULT_CDATA_LIST_ATTRIBUTES
+        self.cdata_list_attributes = multi_valued_attributes
+        if preserve_whitespace_tags is self.USE_DEFAULT:
+            preserve_whitespace_tags = self.DEFAULT_PRESERVE_WHITESPACE_TAGS
+        self.preserve_whitespace_tags = preserve_whitespace_tags
+        if store_line_numbers == self.USE_DEFAULT:
+            store_line_numbers = self.TRACKS_LINE_NUMBERS
+        self.store_line_numbers = store_line_numbers 
+        if string_containers == self.USE_DEFAULT:
+            string_containers = self.DEFAULT_STRING_CONTAINERS
+        self.string_containers = string_containers
+        
+    def initialize_soup(self, soup):
+        """The BeautifulSoup object has been initialized and is now
+        being associated with the TreeBuilder.
+
+        :param soup: A BeautifulSoup object.
+        """
+        self.soup = soup
+        
+    def reset(self):
+        """Do any work necessary to reset the underlying parser
+        for a new document.
+
+        By default, this does nothing.
+        """
+        pass
+
+    def can_be_empty_element(self, tag_name):
+        """Might a tag with this name be an empty-element tag?
+
+        The final markup may or may not actually present this tag as
+        self-closing.
+
+        For instance: an HTMLBuilder does not consider a <p> tag to be
+        an empty-element tag (it's not in
+        HTMLBuilder.empty_element_tags). This means an empty <p> tag
+        will be presented as "<p></p>", not "<p/>" or "<p>
". + + The default implementation has no opinion about which tags are + empty-element tags, so a tag will be presented as an + empty-element tag if and only if it has no children. + "" will become "", and "bar" will + be left alone. + + :param tag_name: The name of a markup tag. + """ + if self.empty_element_tags is None: + return True + return tag_name in self.empty_element_tags + + def feed(self, markup): + """Run some incoming markup through some parsing process, + populating the `BeautifulSoup` object in self.soup. + + This method is not implemented in TreeBuilder; it must be + implemented in subclasses. + + :return: None. + """ + raise NotImplementedError() + + def prepare_markup(self, markup, user_specified_encoding=None, + document_declared_encoding=None, exclude_encodings=None): + """Run any preliminary steps necessary to make incoming markup + acceptable to the parser. + + :param markup: Some markup -- probably a bytestring. + :param user_specified_encoding: The user asked to try this encoding. + :param document_declared_encoding: The markup itself claims to be + in this encoding. NOTE: This argument is not used by the + calling code and can probably be removed. + :param exclude_encodings: The user asked _not_ to try any of + these encodings. + + :yield: A series of 4-tuples: + (markup, encoding, declared encoding, + has undergone character replacement) + + Each 4-tuple represents a strategy for converting the + document to Unicode and parsing it. Each strategy will be tried + in turn. + + By default, the only strategy is to parse the markup + as-is. See `LXMLTreeBuilderForXML` and + `HTMLParserTreeBuilder` for implementations that take into + account the quirks of particular parsers. + """ + yield markup, None, None, False + + def test_fragment_to_document(self, fragment): + """Wrap an HTML fragment to make it look like a document. + + Different parsers do this differently. For instance, lxml + introduces an empty tag, and html5lib + doesn't. 
Abstracting this away lets us write simple tests + which run HTML fragments through the parser and compare the + results against other HTML fragments. + + This method should not be used outside of tests. + + :param fragment: A string -- fragment of HTML. + :return: A string -- a full HTML document. + """ + return fragment + + def set_up_substitutions(self, tag): + """Set up any substitutions that will need to be performed on + a `Tag` when it's output as a string. + + By default, this does nothing. See `HTMLTreeBuilder` for a + case where this is used. + + :param tag: A `Tag` + :return: Whether or not a substitution was performed. + """ + return False + + def _replace_cdata_list_attribute_values(self, tag_name, attrs): + """When an attribute value is associated with a tag that can + have multiple values for that attribute, convert the string + value to a list of strings. + + Basically, replaces class="foo bar" with class=["foo", "bar"] + + NOTE: This method modifies its input in place. + + :param tag_name: The name of a tag. + :param attrs: A dictionary containing the tag's attributes. + Any appropriate attribute values will be modified in place. + """ + if not attrs: + return attrs + if self.cdata_list_attributes: + universal = self.cdata_list_attributes.get('*', []) + tag_specific = self.cdata_list_attributes.get( + tag_name.lower(), None) + for attr in list(attrs.keys()): + if attr in universal or (tag_specific and attr in tag_specific): + # We have a "class"-type attribute whose string + # value is a whitespace-separated list of + # values. Split it into a list. + value = attrs[attr] + if isinstance(value, str): + values = nonwhitespace_re.findall(value) + else: + # html5lib sometimes calls setAttributes twice + # for the same tag when rearranging the parse + # tree. On the second call the attribute value + # here is already a list. If this happens, + # leave the value alone rather than trying to + # split it again. 
+ values = value + attrs[attr] = values + return attrs + +class SAXTreeBuilder(TreeBuilder): + """A Beautiful Soup treebuilder that listens for SAX events. + + This is not currently used for anything, but it demonstrates + how a simple TreeBuilder would work. + """ + + def feed(self, markup): + raise NotImplementedError() + + def close(self): + pass + + def startElement(self, name, attrs): + attrs = dict((key[1], value) for key, value in list(attrs.items())) + #print("Start %s, %r" % (name, attrs)) + self.soup.handle_starttag(name, attrs) + + def endElement(self, name): + #print("End %s" % name) + self.soup.handle_endtag(name) + + def startElementNS(self, nsTuple, nodeName, attrs): + # Throw away (ns, nodeName) for now. + self.startElement(nodeName, attrs) + + def endElementNS(self, nsTuple, nodeName): + # Throw away (ns, nodeName) for now. + self.endElement(nodeName) + #handler.endElementNS((ns, node.nodeName), node.nodeName) + + def startPrefixMapping(self, prefix, nodeValue): + # Ignore the prefix for now. + pass + + def endPrefixMapping(self, prefix): + # Ignore the prefix for now. + # handler.endPrefixMapping(prefix) + pass + + def characters(self, content): + self.soup.handle_data(content) + + def startDocument(self): + pass + + def endDocument(self): + pass + + +class HTMLTreeBuilder(TreeBuilder): + """This TreeBuilder knows facts about HTML. + + Such as which tags are empty-element tags. + """ + + empty_element_tags = set([ + # These are from HTML5. + 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr', + + # These are from earlier versions of HTML and are removed in HTML5. + 'basefont', 'bgsound', 'command', 'frame', 'image', 'isindex', 'nextid', 'spacer' + ]) + + # The HTML standard defines these as block-level elements. 
Beautiful + # Soup does not treat these elements differently from other elements, + # but it may do so eventually, and this information is available if + # you need to use it. + block_elements = set(["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header", "hr", "li", "main", "nav", "noscript", "ol", "output", "p", "pre", "section", "table", "tfoot", "ul", "video"]) + + # These HTML tags need special treatment so they can be + # represented by a string class other than NavigableString. + # + # For some of these tags, it's because the HTML standard defines + # an unusual content model for them. I made this list by going + # through the HTML spec + # (https://html.spec.whatwg.org/#metadata-content) and looking for + # "metadata content" elements that can contain strings. + # + # The Ruby tags ( and ) are here despite being normal + # "phrasing content" tags, because the content they contain is + # qualitatively different from other text in the document, and it + # can be useful to be able to distinguish it. + # + # TODO: Arguably

 and
+            # 
+
This numeric entity is missing the final semicolon:
+ +
a
+
This document contains (do you see it?)
+
This document ends with That attribute value was bogus
+The doctype is invalid because it contains extra whitespace +
That boolean attribute had no value
+
Here's a nonexistent entity: &#foo; (do you see it?)
+
This document ends before the entity finishes: > +

Paragraphs shouldn't contain block display elements, but this one does:

you see?

+Multiple values for the same attribute. +
Here's a table
+
+
This tag contains nothing but whitespace:
+

This p tag is cut off by

the end of the blockquote tag
+
Here's a nested table:
foo
This table contains bare markup
+ +
This document contains a surprise doctype
+ +
Tag name contains Unicode characters
+ + +""" + + +class SoupTest(object): + + @property + def default_builder(self): + return default_builder + + def soup(self, markup, **kwargs): + """Build a Beautiful Soup object from markup.""" + builder = kwargs.pop('builder', self.default_builder) + return BeautifulSoup(markup, builder=builder, **kwargs) + + def document_for(self, markup, **kwargs): + """Turn an HTML fragment into a document. + + The details depend on the builder. + """ + return self.default_builder(**kwargs).test_fragment_to_document(markup) + + def assert_soup(self, to_parse, compare_parsed_to=None): + """Parse some markup using Beautiful Soup and verify that + the output markup is as expected. + """ + builder = self.default_builder + obj = BeautifulSoup(to_parse, builder=builder) + if compare_parsed_to is None: + compare_parsed_to = to_parse + + # Verify that the documents come out the same. + assert obj.decode() == self.document_for(compare_parsed_to) + + # Also run some checks on the BeautifulSoup object itself: + + # Verify that every tag that was opened was eventually closed. + + # There are no tags in the open tag counter. + assert all(v==0 for v in list(obj.open_tag_counter.values())) + + # The only tag in the tag stack is the one for the root + # document. + assert [obj.ROOT_TAG_NAME] == [x.name for x in obj.tagStack] + + assertSoupEquals = assert_soup + + def assertConnectedness(self, element): + """Ensure that next_element and previous_element are properly + set for all descendants of the given element. + """ + earlier = None + for e in element.descendants: + if earlier: + assert e == earlier.next_element + assert earlier == e.previous_element + earlier = e + + def linkage_validator(self, el, _recursive_call=False): + """Ensure proper linkage throughout the document.""" + descendant = None + # Document element should have no previous element or previous sibling. + # It also shouldn't have a next sibling. 
+ if el.parent is None: + assert el.previous_element is None,\ + "Bad previous_element\nNODE: {}\nPREV: {}\nEXPECTED: {}".format( + el, el.previous_element, None + ) + assert el.previous_sibling is None,\ + "Bad previous_sibling\nNODE: {}\nPREV: {}\nEXPECTED: {}".format( + el, el.previous_sibling, None + ) + assert el.next_sibling is None,\ + "Bad next_sibling\nNODE: {}\nNEXT: {}\nEXPECTED: {}".format( + el, el.next_sibling, None + ) + + idx = 0 + child = None + last_child = None + last_idx = len(el.contents) - 1 + for child in el.contents: + descendant = None + + # Parent should link next element to their first child + # That child should have no previous sibling + if idx == 0: + if el.parent is not None: + assert el.next_element is child,\ + "Bad next_element\nNODE: {}\nNEXT: {}\nEXPECTED: {}".format( + el, el.next_element, child + ) + assert child.previous_element is el,\ + "Bad previous_element\nNODE: {}\nPREV: {}\nEXPECTED: {}".format( + child, child.previous_element, el + ) + assert child.previous_sibling is None,\ + "Bad previous_sibling\nNODE: {}\nPREV {}\nEXPECTED: {}".format( + child, child.previous_sibling, None + ) + + # If not the first child, previous index should link as sibling to this index + # Previous element should match the last index or the last bubbled up descendant + else: + assert child.previous_sibling is el.contents[idx - 1],\ + "Bad previous_sibling\nNODE: {}\nPREV {}\nEXPECTED {}".format( + child, child.previous_sibling, el.contents[idx - 1] + ) + assert el.contents[idx - 1].next_sibling is child,\ + "Bad next_sibling\nNODE: {}\nNEXT {}\nEXPECTED {}".format( + el.contents[idx - 1], el.contents[idx - 1].next_sibling, child + ) + + if last_child is not None: + assert child.previous_element is last_child,\ + "Bad previous_element\nNODE: {}\nPREV {}\nEXPECTED {}\nCONTENTS {}".format( + child, child.previous_element, last_child, child.parent.contents + ) + assert last_child.next_element is child,\ + "Bad next_element\nNODE: {}\nNEXT 
{}\nEXPECTED {}".format( + last_child, last_child.next_element, child + ) + + if isinstance(child, Tag) and child.contents: + descendant = self.linkage_validator(child, True) + # A bubbled up descendant should have no next siblings + assert descendant.next_sibling is None,\ + "Bad next_sibling\nNODE: {}\nNEXT {}\nEXPECTED {}".format( + descendant, descendant.next_sibling, None + ) + + # Mark last child as either the bubbled up descendant or the current child + if descendant is not None: + last_child = descendant + else: + last_child = child + + # If last child, there are non next siblings + if idx == last_idx: + assert child.next_sibling is None,\ + "Bad next_sibling\nNODE: {}\nNEXT {}\nEXPECTED {}".format( + child, child.next_sibling, None + ) + idx += 1 + + child = descendant if descendant is not None else child + if child is None: + child = el + + if not _recursive_call and child is not None: + target = el + while True: + if target is None: + assert child.next_element is None, \ + "Bad next_element\nNODE: {}\nNEXT {}\nEXPECTED {}".format( + child, child.next_element, None + ) + break + elif target.next_sibling is not None: + assert child.next_element is target.next_sibling, \ + "Bad next_element\nNODE: {}\nNEXT {}\nEXPECTED {}".format( + child, child.next_element, target.next_sibling + ) + break + target = target.parent + + # We are done, so nothing to return + return None + else: + # Return the child to the recursive caller + return child + + def assert_selects(self, tags, should_match): + """Make sure that the given tags have the correct text. + + This is used in tests that define a bunch of tags, each + containing a single string, and then select certain strings by + some mechanism. + """ + assert [tag.string for tag in tags] == should_match + + def assert_selects_ids(self, tags, should_match): + """Make sure that the given tags have the correct IDs. 
+ + This is used in tests that define a bunch of tags, each + containing a single string, and then select certain strings by + some mechanism. + """ + assert [tag['id'] for tag in tags] == should_match + + +class TreeBuilderSmokeTest(object): + # Tests that are common to HTML and XML tree builders. + + @pytest.mark.parametrize( + "multi_valued_attributes", + [None, {}, dict(b=['class']), {'*': ['notclass']}] + ) + def test_attribute_not_multi_valued(self, multi_valued_attributes): + markup = '' + soup = self.soup(markup, multi_valued_attributes=multi_valued_attributes) + assert soup.a['class'] == 'a b c' + + @pytest.mark.parametrize( + "multi_valued_attributes", [dict(a=['class']), {'*': ['class']}] + ) + def test_attribute_multi_valued(self, multi_valued_attributes): + markup = '' + soup = self.soup( + markup, multi_valued_attributes=multi_valued_attributes + ) + assert soup.a['class'] == ['a', 'b', 'c'] + + def test_invalid_doctype(self): + markup = 'content' + markup = '' + soup = self.soup(markup) + +class HTMLTreeBuilderSmokeTest(TreeBuilderSmokeTest): + + """A basic test of a treebuilder's competence. + + Any HTML treebuilder, present or future, should be able to pass + these tests. With invalid markup, there's room for interpretation, + and different parsers can handle it differently. But with the + markup in these tests, there's not much room for interpretation. + """ + + def test_empty_element_tags(self): + """Verify that all HTML4 and HTML5 empty element (aka void element) tags + are handled correctly. 
+ """ + for name in [ + 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr', + 'spacer', 'frame' + ]: + soup = self.soup("") + new_tag = soup.new_tag(name) + assert new_tag.is_empty_element == True + + def test_special_string_containers(self): + soup = self.soup( + "" + ) + assert isinstance(soup.style.string, Stylesheet) + assert isinstance(soup.script.string, Script) + + soup = self.soup( + "" + ) + assert isinstance(soup.style.string, Stylesheet) + # The contents of the style tag resemble an HTML comment, but + # it's not treated as a comment. + assert soup.style.string == "" + assert isinstance(soup.style.string, Stylesheet) + + def test_pickle_and_unpickle_identity(self): + # Pickling a tree, then unpickling it, yields a tree identical + # to the original. + tree = self.soup("foo") + dumped = pickle.dumps(tree, 2) + loaded = pickle.loads(dumped) + assert loaded.__class__ == BeautifulSoup + assert loaded.decode() == tree.decode() + + def assertDoctypeHandled(self, doctype_fragment): + """Assert that a given doctype string is handled correctly.""" + doctype_str, soup = self._document_with_doctype(doctype_fragment) + + # Make sure a Doctype object was created. + doctype = soup.contents[0] + assert doctype.__class__ == Doctype + assert doctype == doctype_fragment + assert soup.encode("utf8")[:len(doctype_str)] == doctype_str + + # Make sure that the doctype was correctly associated with the + # parse tree and that the rest of the document parsed. + assert soup.p.contents[0] == 'foo' + + def _document_with_doctype(self, doctype_fragment, doctype_string="DOCTYPE"): + """Generate and parse a document with the given doctype.""" + doctype = '' % (doctype_string, doctype_fragment) + markup = doctype + '\n

foo

' + soup = self.soup(markup) + return doctype.encode("utf8"), soup + + def test_normal_doctypes(self): + """Make sure normal, everyday HTML doctypes are handled correctly.""" + self.assertDoctypeHandled("html") + self.assertDoctypeHandled( + 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"') + + def test_empty_doctype(self): + soup = self.soup("") + doctype = soup.contents[0] + assert "" == doctype.strip() + + def test_mixed_case_doctype(self): + # A lowercase or mixed-case doctype becomes a Doctype. + for doctype_fragment in ("doctype", "DocType"): + doctype_str, soup = self._document_with_doctype( + "html", doctype_fragment + ) + + # Make sure a Doctype object was created and that the DOCTYPE + # is uppercase. + doctype = soup.contents[0] + assert doctype.__class__ == Doctype + assert doctype == "html" + assert soup.encode("utf8")[:len(doctype_str)] == b"" + + # Make sure that the doctype was correctly associated with the + # parse tree and that the rest of the document parsed. + assert soup.p.contents[0] == 'foo' + + def test_public_doctype_with_url(self): + doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"' + self.assertDoctypeHandled(doctype) + + def test_system_doctype(self): + self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"') + + def test_namespaced_system_doctype(self): + # We can handle a namespaced doctype with a system ID. + self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"') + + def test_namespaced_public_doctype(self): + # Test a namespaced doctype with a public id. + self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"') + + def test_real_xhtml_document(self): + """A real XHTML document should come out more or less the same as it went in.""" + markup = b""" + + +Hello. +Goodbye. 
+""" + with warnings.catch_warnings(record=True) as w: + soup = self.soup(markup) + assert soup.encode("utf-8").replace(b"\n", b"") == markup.replace(b"\n", b"") + + # No warning was issued about parsing an XML document as HTML, + # because XHTML is both. + assert w == [] + + + def test_namespaced_html(self): + # When a namespaced XML document is parsed as HTML it should + # be treated as HTML with weird tag names. + markup = b"""content""" + with warnings.catch_warnings(record=True) as w: + soup = self.soup(markup) + + assert 2 == len(soup.find_all("ns1:foo")) + + # n.b. no "you're parsing XML as HTML" warning was given + # because there was no XML declaration. + assert [] == w + + def test_detect_xml_parsed_as_html(self): + # A warning is issued when parsing an XML document as HTML, + # but basic stuff should still work. + markup = b"""string""" + with warnings.catch_warnings(record=True) as w: + soup = self.soup(markup) + assert soup.tag.string == 'string' + [warning] = w + assert isinstance(warning.message, XMLParsedAsHTMLWarning) + assert str(warning.message) == XMLParsedAsHTMLWarning.MESSAGE + + # NOTE: the warning is not issued if the document appears to + # be XHTML (tested with test_real_xhtml_document in the + # superclass) or if there is no XML declaration (tested with + # test_namespaced_html in the superclass). + + def test_processing_instruction(self): + # We test both Unicode and bytestring to verify that + # process_markup correctly sets processing_instruction_class + # even when the markup is already Unicode and there is no + # need to process anything. + markup = """""" + soup = self.soup(markup) + assert markup == soup.decode() + + markup = b"""""" + soup = self.soup(markup) + assert markup == soup.encode("utf8") + + def test_deepcopy(self): + """Make sure you can copy the tree builder. + + This is important because the builder is part of a + BeautifulSoup object, and we want to be able to copy that. 
+ """ + copy.deepcopy(self.default_builder) + + def test_p_tag_is_never_empty_element(self): + """A

tag is never designated as an empty-element tag. + + Even if the markup shows it as an empty-element tag, it + shouldn't be presented that way. + """ + soup = self.soup("

") + assert not soup.p.is_empty_element + assert str(soup.p) == "

" + + def test_unclosed_tags_get_closed(self): + """A tag that's not closed by the end of the document should be closed. + + This applies to all tags except empty-element tags. + """ + self.assert_soup("

", "

") + self.assert_soup("", "") + + self.assert_soup("
", "
") + + def test_br_is_always_empty_element_tag(self): + """A
tag is designated as an empty-element tag. + + Some parsers treat

as one
tag, some parsers as + two tags, but it should always be an empty-element tag. + """ + soup = self.soup("

") + assert soup.br.is_empty_element + assert str(soup.br) == "
" + + def test_nested_formatting_elements(self): + self.assert_soup("") + + def test_double_head(self): + html = ''' + + +Ordinary HEAD element test + + + +Hello, world! + + +''' + soup = self.soup(html) + assert "text/javascript" == soup.find('script')['type'] + + def test_comment(self): + # Comments are represented as Comment objects. + markup = "

foobaz

" + self.assert_soup(markup) + + soup = self.soup(markup) + comment = soup.find(string="foobar") + assert comment.__class__ == Comment + + # The comment is properly integrated into the tree. + foo = soup.find(string="foo") + assert comment == foo.next_element + baz = soup.find(string="baz") + assert comment == baz.previous_element + + def test_preserved_whitespace_in_pre_and_textarea(self): + """Whitespace must be preserved in
 and \n"
+        self.assert_soup(pre_markup)
+        self.assert_soup(textarea_markup)
+
+        soup = self.soup(pre_markup)
+        assert soup.pre.prettify() == pre_markup
+
+        soup = self.soup(textarea_markup)
+        assert soup.textarea.prettify() == textarea_markup
+
+        soup = self.soup("")
+        assert soup.textarea.prettify() == "\n"
+
+    def test_nested_inline_elements(self):
+        """Inline elements can be nested indefinitely."""
+        b_tag = "Inside a B tag"
+        self.assert_soup(b_tag)
+
+        nested_b_tag = "

A nested tag

" + self.assert_soup(nested_b_tag) + + double_nested_b_tag = "

A doubly nested tag

" + self.assert_soup(nested_b_tag) + + def test_nested_block_level_elements(self): + """Block elements can be nested.""" + soup = self.soup('

Foo

') + blockquote = soup.blockquote + assert blockquote.p.b.string == 'Foo' + assert blockquote.b.string == 'Foo' + + def test_correctly_nested_tables(self): + """One table can go inside another one.""" + markup = ('' + '' + "') + + self.assert_soup( + markup, + '
Here's another table:" + '' + '' + '
foo
Here\'s another table:' + '
foo
' + '
') + + self.assert_soup( + "" + "" + "
Foo
Bar
Baz
") + + def test_multivalued_attribute_with_whitespace(self): + # Whitespace separating the values of a multi-valued attribute + # should be ignored. + + markup = '
' + soup = self.soup(markup) + assert ['foo', 'bar'] == soup.div['class'] + + # If you search by the literal name of the class it's like the whitespace + # wasn't there. + assert soup.div == soup.find('div', class_="foo bar") + + def test_deeply_nested_multivalued_attribute(self): + # html5lib can set the attributes of the same tag many times + # as it rearranges the tree. This has caused problems with + # multivalued attributes. + markup = '
' + soup = self.soup(markup) + assert ["css"] == soup.div.div['class'] + + def test_multivalued_attribute_on_html(self): + # html5lib uses a different API to set the attributes ot the + # tag. This has caused problems with multivalued + # attributes. + markup = '' + soup = self.soup(markup) + assert ["a", "b"] == soup.html['class'] + + def test_angle_brackets_in_attribute_values_are_escaped(self): + self.assert_soup('', '') + + def test_strings_resembling_character_entity_references(self): + # "&T" and "&p" look like incomplete character entities, but they are + # not. + self.assert_soup( + "

• AT&T is in the s&p 500

", + "

\u2022 AT&T is in the s&p 500

" + ) + + def test_apos_entity(self): + self.assert_soup( + "

Bob's Bar

", + "

Bob's Bar

", + ) + + def test_entities_in_foreign_document_encoding(self): + # “ and ” are invalid numeric entities referencing + # Windows-1252 characters. - references a character common + # to Windows-1252 and Unicode, and ☃ references a + # character only found in Unicode. + # + # All of these entities should be converted to Unicode + # characters. + markup = "

“Hello” -☃

" + soup = self.soup(markup) + assert "“Hello” -☃" == soup.p.string + + def test_entities_in_attributes_converted_to_unicode(self): + expect = '

' + self.assert_soup('

', expect) + self.assert_soup('

', expect) + self.assert_soup('

', expect) + self.assert_soup('

', expect) + + def test_entities_in_text_converted_to_unicode(self): + expect = '

pi\N{LATIN SMALL LETTER N WITH TILDE}ata

' + self.assert_soup("

piñata

", expect) + self.assert_soup("

piñata

", expect) + self.assert_soup("

piñata

", expect) + self.assert_soup("

piñata

", expect) + + def test_quot_entity_converted_to_quotation_mark(self): + self.assert_soup("

I said "good day!"

", + '

I said "good day!"

') + + def test_out_of_range_entity(self): + expect = "\N{REPLACEMENT CHARACTER}" + self.assert_soup("�", expect) + self.assert_soup("�", expect) + self.assert_soup("�", expect) + + def test_multipart_strings(self): + "Mostly to prevent a recurrence of a bug in the html5lib treebuilder." + soup = self.soup("

\nfoo

") + assert "p" == soup.h2.string.next_element.name + assert "p" == soup.p.name + self.assertConnectedness(soup) + + def test_empty_element_tags(self): + """Verify consistent handling of empty-element tags, + no matter how they come in through the markup. + """ + self.assert_soup('


', "


") + self.assert_soup('


', "


") + + def test_head_tag_between_head_and_body(self): + "Prevent recurrence of a bug in the html5lib treebuilder." + content = """ + + foo + +""" + soup = self.soup(content) + assert soup.html.body is not None + self.assertConnectedness(soup) + + def test_multiple_copies_of_a_tag(self): + "Prevent recurrence of a bug in the html5lib treebuilder." + content = """ + + + + + +""" + soup = self.soup(content) + self.assertConnectedness(soup.article) + + def test_basic_namespaces(self): + """Parsers don't need to *understand* namespaces, but at the + very least they should not choke on namespaces or lose + data.""" + + markup = b'4' + soup = self.soup(markup) + assert markup == soup.encode() + html = soup.html + assert 'http://www.w3.org/1999/xhtml' == soup.html['xmlns'] + assert 'http://www.w3.org/1998/Math/MathML' == soup.html['xmlns:mathml'] + assert 'http://www.w3.org/2000/svg' == soup.html['xmlns:svg'] + + def test_multivalued_attribute_value_becomes_list(self): + markup = b'' + soup = self.soup(markup) + assert ['foo', 'bar'] == soup.a['class'] + + # + # Generally speaking, tests below this point are more tests of + # Beautiful Soup than tests of the tree builders. But parsers are + # weird, so we run these tests separately for every tree builder + # to detect any differences between them. + # + + def test_can_parse_unicode_document(self): + # A seemingly innocuous document... but it's in Unicode! And + # it contains characters that can't be represented in the + # encoding found in the declaration! The horror! + markup = 'Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!' + soup = self.soup(markup) + assert 'Sacr\xe9 bleu!' 
== soup.body.string + + def test_soupstrainer(self): + """Parsers should be able to work with SoupStrainers.""" + strainer = SoupStrainer("b") + soup = self.soup("A bold statement", + parse_only=strainer) + assert soup.decode() == "bold" + + def test_single_quote_attribute_values_become_double_quotes(self): + self.assert_soup("", + '') + + def test_attribute_values_with_nested_quotes_are_left_alone(self): + text = """a""" + self.assert_soup(text) + + def test_attribute_values_with_double_nested_quotes_get_quoted(self): + text = """a""" + soup = self.soup(text) + soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"' + self.assert_soup( + soup.foo.decode(), + """a""") + + def test_ampersand_in_attribute_value_gets_escaped(self): + self.assert_soup('', + '') + + self.assert_soup( + 'foo', + 'foo') + + def test_escaped_ampersand_in_attribute_value_is_left_alone(self): + self.assert_soup('') + + def test_entities_in_strings_converted_during_parsing(self): + # Both XML and HTML entities are converted to Unicode characters + # during parsing. + text = "

<<sacré bleu!>>

" + expected = "

<<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>>

" + self.assert_soup(text, expected) + + def test_smart_quotes_converted_on_the_way_in(self): + # Microsoft smart quotes are converted to Unicode characters during + # parsing. + quote = b"

\x91Foo\x92

" + soup = self.soup(quote) + assert soup.p.string == "\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}" + + def test_non_breaking_spaces_converted_on_the_way_in(self): + soup = self.soup("  ") + assert soup.a.string == "\N{NO-BREAK SPACE}" * 2 + + def test_entities_converted_on_the_way_out(self): + text = "

<<sacré bleu!>>

" + expected = "

<<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>>

".encode("utf-8") + soup = self.soup(text) + assert soup.p.encode("utf-8") == expected + + def test_real_iso_8859_document(self): + # Smoke test of interrelated functionality, using an + # easy-to-understand document. + + # Here it is in Unicode. Note that it claims to be in ISO-8859-1. + unicode_html = '

Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!

' + + # That's because we're going to encode it into ISO-8859-1, + # and use that to test. + iso_latin_html = unicode_html.encode("iso-8859-1") + + # Parse the ISO-8859-1 HTML. + soup = self.soup(iso_latin_html) + + # Encode it to UTF-8. + result = soup.encode("utf-8") + + # What do we expect the result to look like? Well, it would + # look like unicode_html, except that the META tag would say + # UTF-8 instead of ISO-8859-1. + expected = unicode_html.replace("ISO-8859-1", "utf-8") + + # And, of course, it would be in UTF-8, not Unicode. + expected = expected.encode("utf-8") + + # Ta-da! + assert result == expected + + def test_real_shift_jis_document(self): + # Smoke test to make sure the parser can handle a document in + # Shift-JIS encoding, without choking. + shift_jis_html = ( + b'
'
+            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
+            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
+            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
+            b'
') + unicode_html = shift_jis_html.decode("shift-jis") + soup = self.soup(unicode_html) + + # Make sure the parse tree is correctly encoded to various + # encodings. + assert soup.encode("utf-8") == unicode_html.encode("utf-8") + assert soup.encode("euc_jp") == unicode_html.encode("euc_jp") + + def test_real_hebrew_document(self): + # A real-world test to make sure we can convert ISO-8859-9 (a + # Hebrew encoding) to UTF-8. + hebrew_document = b'Hebrew (ISO 8859-8) in Visual Directionality

Hebrew (ISO 8859-8) in Visual Directionality

\xed\xe5\xec\xf9' + soup = self.soup( + hebrew_document, from_encoding="iso8859-8") + # Some tree builders call it iso8859-8, others call it iso-8859-9. + # That's not a difference we really care about. + assert soup.original_encoding in ('iso8859-8', 'iso-8859-8') + assert soup.encode('utf-8') == ( + hebrew_document.decode("iso8859-8").encode("utf-8") + ) + + def test_meta_tag_reflects_current_encoding(self): + # Here's the tag saying that a document is + # encoded in Shift-JIS. + meta_tag = ('') + + # Here's a document incorporating that meta tag. + shift_jis_html = ( + '\n%s\n' + '' + 'Shift-JIS markup goes here.') % meta_tag + soup = self.soup(shift_jis_html) + + # Parse the document, and the charset is seemingly unaffected. + parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'}) + content = parsed_meta['content'] + assert 'text/html; charset=x-sjis' == content + + # But that value is actually a ContentMetaAttributeValue object. + assert isinstance(content, ContentMetaAttributeValue) + + # And it will take on a value that reflects its current + # encoding. + assert 'text/html; charset=utf8' == content.encode("utf8") + + # For the rest of the story, see TestSubstitutions in + # test_tree.py. + + def test_html5_style_meta_tag_reflects_current_encoding(self): + # Here's the tag saying that a document is + # encoded in Shift-JIS. + meta_tag = ('') + + # Here's a document incorporating that meta tag. + shift_jis_html = ( + '\n%s\n' + '' + 'Shift-JIS markup goes here.') % meta_tag + soup = self.soup(shift_jis_html) + + # Parse the document, and the charset is seemingly unaffected. + parsed_meta = soup.find('meta', id="encoding") + charset = parsed_meta['charset'] + assert 'x-sjis' == charset + + # But that value is actually a CharsetMetaAttributeValue object. + assert isinstance(charset, CharsetMetaAttributeValue) + + # And it will take on a value that reflects its current + # encoding. 
+ assert 'utf8' == charset.encode("utf8") + + def test_python_specific_encodings_not_used_in_charset(self): + # You can encode an HTML document using a Python-specific + # encoding, but that encoding won't be mentioned _inside_ the + # resulting document. Instead, the document will appear to + # have no encoding. + for markup in [ + b'' + b'' + ]: + soup = self.soup(markup) + for encoding in PYTHON_SPECIFIC_ENCODINGS: + if encoding in ( + 'idna', 'mbcs', 'oem', 'undefined', + 'string_escape', 'string-escape' + ): + # For one reason or another, these will raise an + # exception if we actually try to use them, so don't + # bother. + continue + encoded = soup.encode(encoding) + assert b'meta charset=""' in encoded + assert encoding.encode("ascii") not in encoded + + def test_tag_with_no_attributes_can_have_attributes_added(self): + data = self.soup("text") + data.a['foo'] = 'bar' + assert 'text' == data.a.decode() + + def test_closing_tag_with_no_opening_tag(self): + # Without BeautifulSoup.open_tag_counter, the tag will + # cause _popToTag to be called over and over again as we look + # for a tag that wasn't there. The result is that 'text2' + # will show up outside the body of the document. + soup = self.soup("

text1

text2
") + assert "

text1

text2
" == soup.body.decode() + + def test_worst_case(self): + """Test the worst case (currently) for linking issues.""" + + soup = self.soup(BAD_DOCUMENT) + self.linkage_validator(soup) + + +class XMLTreeBuilderSmokeTest(TreeBuilderSmokeTest): + + def test_pickle_and_unpickle_identity(self): + # Pickling a tree, then unpickling it, yields a tree identical + # to the original. + tree = self.soup("foo") + dumped = pickle.dumps(tree, 2) + loaded = pickle.loads(dumped) + assert loaded.__class__ == BeautifulSoup + assert loaded.decode() == tree.decode() + + def test_docstring_generated(self): + soup = self.soup("") + assert soup.encode() == b'\n' + + def test_xml_declaration(self): + markup = b"""\n""" + soup = self.soup(markup) + assert markup == soup.encode("utf8") + + def test_python_specific_encodings_not_used_in_xml_declaration(self): + # You can encode an XML document using a Python-specific + # encoding, but that encoding won't be mentioned _inside_ the + # resulting document. + markup = b"""\n""" + soup = self.soup(markup) + for encoding in PYTHON_SPECIFIC_ENCODINGS: + if encoding in ( + 'idna', 'mbcs', 'oem', 'undefined', + 'string_escape', 'string-escape' + ): + # For one reason or another, these will raise an + # exception if we actually try to use them, so don't + # bother. + continue + encoded = soup.encode(encoding) + assert b'' in encoded + assert encoding.encode("ascii") not in encoded + + def test_processing_instruction(self): + markup = b"""\n""" + soup = self.soup(markup) + assert markup == soup.encode("utf8") + + def test_real_xhtml_document(self): + """A real XHTML document should come out *exactly* the same as it went in.""" + markup = b""" + + +Hello. +Goodbye. 
+""" + soup = self.soup(markup) + assert soup.encode("utf-8") == markup + + def test_nested_namespaces(self): + doc = b""" + + + + + +""" + soup = self.soup(doc) + assert doc == soup.encode() + + def test_formatter_processes_script_tag_for_xml_documents(self): + doc = """ + +""" + soup = BeautifulSoup(doc, "lxml-xml") + # lxml would have stripped this while parsing, but we can add + # it later. + soup.script.string = 'console.log("< < hey > > ");' + encoded = soup.encode() + assert b"< < hey > >" in encoded + + def test_can_parse_unicode_document(self): + markup = 'Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!' + soup = self.soup(markup) + assert 'Sacr\xe9 bleu!' == soup.root.string + + def test_can_parse_unicode_document_begining_with_bom(self): + markup = '\N{BYTE ORDER MARK}Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!' + soup = self.soup(markup) + assert 'Sacr\xe9 bleu!' == soup.root.string + + def test_popping_namespaced_tag(self): + markup = 'b2012-07-02T20:33:42Zcd' + soup = self.soup(markup) + assert str(soup.rss) == markup + + def test_docstring_includes_correct_encoding(self): + soup = self.soup("") + assert soup.encode("latin1") == b'\n' + + def test_large_xml_document(self): + """A large XML document should come out the same as it went in.""" + markup = (b'\n' + + b'0' * (2**12) + + b'') + soup = self.soup(markup) + assert soup.encode("utf-8") == markup + + def test_tags_are_empty_element_if_and_only_if_they_are_empty(self): + self.assert_soup("

", "

") + self.assert_soup("

foo

") + + def test_namespaces_are_preserved(self): + markup = 'This tag is in the a namespaceThis tag is in the b namespace' + soup = self.soup(markup) + root = soup.root + assert "http://example.com/" == root['xmlns:a'] + assert "http://example.net/" == root['xmlns:b'] + + def test_closing_namespaced_tag(self): + markup = '

20010504

' + soup = self.soup(markup) + assert str(soup.p) == markup + + def test_namespaced_attributes(self): + markup = '' + soup = self.soup(markup) + assert str(soup.foo) == markup + + def test_namespaced_attributes_xml_namespace(self): + markup = 'bar' + soup = self.soup(markup) + assert str(soup.foo) == markup + + def test_find_by_prefixed_name(self): + doc = """ + + foo + bar + baz + +""" + soup = self.soup(doc) + + # There are three tags. + assert 3 == len(soup.find_all('tag')) + + # But two of them are ns1:tag and one of them is ns2:tag. + assert 2 == len(soup.find_all('ns1:tag')) + assert 1 == len(soup.find_all('ns2:tag')) + + assert 1, len(soup.find_all('ns2:tag', key='value')) + assert 3, len(soup.find_all(['ns1:tag', 'ns2:tag'])) + + def test_copy_tag_preserves_namespace(self): + xml = """ +""" + + soup = self.soup(xml) + tag = soup.document + duplicate = copy.copy(tag) + + # The two tags have the same namespace prefix. + assert tag.prefix == duplicate.prefix + + def test_worst_case(self): + """Test the worst case (currently) for linking issues.""" + + soup = self.soup(BAD_DOCUMENT) + self.linkage_validator(soup) + + +class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest): + """Smoke test for a tree builder that supports HTML5.""" + + def test_real_xhtml_document(self): + # Since XHTML is not HTML5, HTML5 parsers are not tested to handle + # XHTML documents in any particular way. 
+ pass + + def test_html_tags_have_namespace(self): + markup = "" + soup = self.soup(markup) + assert "http://www.w3.org/1999/xhtml" == soup.a.namespace + + def test_svg_tags_have_namespace(self): + markup = '' + soup = self.soup(markup) + namespace = "http://www.w3.org/2000/svg" + assert namespace == soup.svg.namespace + assert namespace == soup.circle.namespace + + + def test_mathml_tags_have_namespace(self): + markup = '5' + soup = self.soup(markup) + namespace = 'http://www.w3.org/1998/Math/MathML' + assert namespace == soup.math.namespace + assert namespace == soup.msqrt.namespace + + def test_xml_declaration_becomes_comment(self): + markup = '' + soup = self.soup(markup) + assert isinstance(soup.contents[0], Comment) + assert soup.contents[0] == '?xml version="1.0" encoding="utf-8"?' + assert "html" == soup.contents[0].next_element.name diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4670634698080256.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4670634698080256.testcase new file mode 100644 index 00000000..4828f8a4 --- /dev/null +++ b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-4670634698080256.testcase @@ -0,0 +1 @@ +

\ No newline at end of file diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5000587759190016.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5000587759190016.testcase new file mode 100644 index 0000000000000000000000000000000000000000..8a585ce9270e58b125922297f2d099298adac822 GIT binary patch literal 15347 zcmeHOKW`H;6u+P#aRU+(18Xxd6l%E*Nl7L5s!A+KhygMnL5ETmLXBVgZv$er@K!|sD^YeJSDOAWa&ttPlG6X?Atl{} z^qDpjS}5Y|WIWoTq(csczd4*??66BhVoGhy6y1ZDBgWIhQ~nf$4?LB=5tQo zxX3XIMJ!OhCjE@@G@nnHL!!ULItAw?J4Na|Tz{fnYNoHpCUhroxQ^Xm|6~3AZ`jNU z_1CN>Z8L2Ov5UvVto$ukPyf~8Z|ZG_z+Cw)}rtPVgJ$Y!^aQq_qB2x+d@%#VL&Ee zscL#D6CO7NUrf^%e&SnKww(HyG#lR3!%5nHqKHLiq!Uq%XT(ATAI~R!c+#u~ktD)W z#Z-lH42O%htTt3XpZZq_8E}9wb>zG-$Bf4}bwbGBM0e%#VflQ3>E%3$>n4SKSi0u< zER%m$TsEp5ifAh3ZBVWkB$KNF37662`3#lv#vffpN$8m zN~NsW&nsp9m$XI|Y?vkxR-@e#J2vfv={I9yD{8L>?bJ5jl{m^X>DgR>Pz4nzY55>9 zaJF$P4nM{r0N0v8sLEMw1+^QiLKN+(xQwxB!0bXrBv?Z{w${rqtoY4tO&Vk2ajs38ITfhY1|%rRV$pMf6PhuP>A~gb(by@6 zIYN?5j_g2E=LC?WXmn7m$gyJwVL_x)6E>WMna*0~7+ng6=am8TKe^U~eF_~XCRZkW z#PW{M(|AZY!jT)*wowa!XyA7C+-+hS%aO@_kcpK7$) zCWCD<@Kz>oC}@WX+hj0SGTJ5s9fT9I*)juIwVk>gO$LI&wIfvqDdYl5f{7)#xCTIF z2SI2-qp#%56T}6sGt3_+us`Vd;ONzB-y@~Ll>LlA@XZVW@(&R0YF literal 0 HcmV?d00001 diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5167584867909632.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5167584867909632.testcase new file mode 100644 index 0000000000000000000000000000000000000000..0fe66dd23fe94a8e451dfb1a84da38ed0984c1f9 GIT binary patch literal 19469 zcmeHP%}&BV5UwW1c<}1c&|0d-m{hN9YYZMVF=z-E6E3Azz?Aa07`*ugp1gb37w}np z1J6Ey_J;^Y+hVrDb~+?vcLObSXLo15`M%w!71Cc4g4b8Y>@_=7*S$g~EsGkt2Bn={ zt2=1kXi~B5P|KmamD`Rb<;p=_zIm%4C&qy-Vu088oOsuGz>Kq!-D0_0%~Ls_F?gfesF!B~xlP=*UCbCH?n*-O%0 z!Q*F>?46hl=N3}p9^s$pfQ;#K?C}VRg&?#Cu4Gl`M+NhN`Lsc{A^94o$;0t3y>S870B{5&zz9DFt<}-BMx(8V;~9 
zu^`3{e->4K<9oQkG(Mym>pheEPdQ^GK5fr%nc+{ydd)pa{!emW8I*DurdQ@~y8Kk4 zgV7-WU60=nl>ER&`8c&A8JzU^4}@gt+g^h^gT@rwKLtV<2l@NX63OYId;^?(0~P60 z?9W6QN1A0E6WR+_CBVe3&|S@XJ2R{CP{xr=dJz@rVH;L3n4Q9zrUy|Y_Y}rG!O4js mqyYzMPzM!p)C@wIG(#Cwq{BE*384&|DuI`yvfSgo2>k@d3|T1v literal 0 HcmV?d00001 diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5270998950477824.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5270998950477824.testcase new file mode 100644 index 0000000000000000000000000000000000000000..fd411427d765e9b17e1e6a015485a70632bb9554 GIT binary patch literal 12 TcmY$eDbjbbRX@cb^ZOhC75W5K literal 0 HcmV?d00001 diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5375146639360000.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5375146639360000.testcase new file mode 100644 index 00000000..6248b2c5 --- /dev/null +++ b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5375146639360000.testcase @@ -0,0 +1 @@ + >tet>< \ No newline at end of file diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5492400320282624.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5492400320282624.testcase new file mode 100644 index 0000000000000000000000000000000000000000..107da539fc15bd1664e35287ace9feda21cb5820 GIT binary patch literal 11502 zcmeHNy>HV%6gOf+VqhSEDxuawNKi#?;xtXg#SCTdK!q3y#cd3AnkG&P3ldWY7XAYy zCT0f2N_Q3{#Kh8pfq{iTAiQ_)?sEPrNt;$FI=4Ds&hO(s-o1P8XBP^E%c~a(bbUIR z>pt6&8VYYetR1|1@3DT}-suHUu57m=zZDhNUbKCCoqp6en~h@8f`8Us?*#40@z@;v zvWixx>qm8ug?`kSZAMZ199wmM)@|Fljl~QWU$J;93#i=Gnw~*BlHQ#|0?zbH*5 zb%|;A>ykT1x$T5V0o|~j6!-jKFyP&Q0fx?{e_SD_qO!^BSE=P1b3*t-8C14p{iSNc z2V3ItzI+T%ukbpRx(HH0EkaF-OQ}&>bS_;QQIeM5QcmbNA;V7VgCSi+Ka`)}b4~+u 
zIOf`Y|F-|bcGS`*xVc><+HeK=DuR7kbb;M|VG*n$UVC7lv5TH2TY-e=Ct3={xQ+R4 zwyZppvo(W_ZxdtZ*>Zz`2*oa8?n9GW;CglXx z<}63=pqMsNj5^H-eT8Y@E zP=-sP&U&-KBcEQhQFN;n4^K@xO-DQbCK=e7a4zp`HQ}5I=Oz-?nQ%^KGA3<_C5= zlRW%*ktfj|g`Fl))G`RHqfkU^=K$nAEVm4Oss1xm@?`cmF%gp;&h|LjDKbyFFvm(o zWHM`)cSw5|y>6c;zMKmSoRv`l zPeWS%fLuIZqeLOxJve;>FPgW-2pF5U#T0&+x5X53 zjQLv8ye*brDwwY#Fm>Y}L;QzdMc~-Z@auB}FN(+qLgC{?R>|o0jGKi4oeBP!uvKWO zmx{C@5I3Jxz--`-k(ePz3Jtudax^kV11};Vc?{4p4le@a3||N1H!?MkT}(oo#aihv D9d3o5 literal 0 HcmV?d00001 diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912.testcase new file mode 100644 index 00000000..367106c7 --- /dev/null +++ b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912.testcase @@ -0,0 +1,2 @@ + +t \ No newline at end of file diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400.testcase new file mode 100644 index 0000000000000000000000000000000000000000..a823d557b4bebed8a9cdb880d281c9de69a1aa0a GIT binary patch literal 3546 zcmdP2$xSTDup1>uLtr!nMnhmU1V%$(Gz3ONU^E0qLtw~uwSNNv*}xU^oQ!vv$DtbH{XnB<{4q&MBMIvxRFa&-_pUv zaZ?A@)wk1eCmnC=bU*Q=8+sfD%u9F-wunGF9Af5kU{XeIH<5gU7$((-f1t0?R%dI;Jq*Qch%_#M(x|K>?1-n+*fFB zm6{d^8#<{hj+7bdpv`Q#2CcfFkUioNUyXr5z+=fRP%u7K(G*r#6$M^Z_&u?|;XuBN z@N6(okxftZk53XI6>lL`P*y0e{H4&t=5>jM|{*N|q#^8%sAhqM+Y3%AITJ zj+ie=x^@H_p!D;o#ZMNvPoPiY!4dto?gr%>8ZfxWSt4CK@!+{gLI_zD^Y~@@!h9AT zhwtJ(69K8%&(5%E2fpgGkYsM!X=s?z8oHP}&TI@w9WRoXm;n8Ha{-ib=yrZ_~b; zLif2&eN24}r`0u!2Q^n!;Z(g+b!QW%0&VHg*@TwX)&r$x>#WdFWGZjuEVhzaKfVQc zE1wv%%}(9W_#3HHZI>9f_+=c57M9cCNo@`2bnw)yoeHG+-$q-W|1arjzM|GxoYGoC z;*bTH!DdtxtcOJ%%uC0=se>6mD|1!l76U4^eq4Nq!{I_ly+tmKJqqfTb*ceU6z362 
ztyNgQGww04iSboml65FWdBXI!jzWnWgo#v19R>P(S{7XznX3V~Twq+Re44__&~>h> q43wHvMb7{ew`wA?1}AsyPlT5DF{Md(5}xqoqavh!LXgNIAN~adfn_KF literal 0 HcmV?d00001 diff --git a/lib/python3.11/site-packages/bs4/tests/fuzz/crash-ffbdfa8a2b26f13537b68d3794b0478a4090ee4a.testcase b/lib/python3.11/site-packages/bs4/tests/fuzz/crash-ffbdfa8a2b26f13537b68d3794b0478a4090ee4a.testcase new file mode 100644 index 0000000000000000000000000000000000000000..885711554056588a381fd9a251d48ce76a916a59 GIT binary patch literal 103 zcma!IT*$!C&%p3|?)u|48N3V(Hi{|v$wno$1*wb-3(;hvkz^S9^*|aEj@#ts@G|Jx OROIF`00A~ppfmtd#~j4~ literal 0 HcmV?d00001 diff --git a/lib/python3.11/site-packages/bs4/tests/test_builder.py b/lib/python3.11/site-packages/bs4/tests/test_builder.py new file mode 100644 index 00000000..75370712 --- /dev/null +++ b/lib/python3.11/site-packages/bs4/tests/test_builder.py @@ -0,0 +1,29 @@ +import pytest +from unittest.mock import patch +from bs4.builder import DetectsXMLParsedAsHTML + +class TestDetectsXMLParsedAsHTML(object): + + @pytest.mark.parametrize( + "markup,looks_like_xml", + [("No xml declaration", False), + ("obviously HTMLActually XHTML", False), + (" < html>Tricky XHTML", False), + ("", True), + ] + ) + def test_warn_if_markup_looks_like_xml(self, markup, looks_like_xml): + # Test of our ability to guess at whether markup looks XML-ish + # _and_ not HTML-ish. 
+ with patch('bs4.builder.DetectsXMLParsedAsHTML._warn') as mock: + for data in markup, markup.encode('utf8'): + result = DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml( + data + ) + assert result == looks_like_xml + if looks_like_xml: + assert mock.called + else: + assert not mock.called + mock.reset_mock() diff --git a/lib/python3.11/site-packages/bs4/tests/test_builder_registry.py b/lib/python3.11/site-packages/bs4/tests/test_builder_registry.py new file mode 100644 index 00000000..9327174f --- /dev/null +++ b/lib/python3.11/site-packages/bs4/tests/test_builder_registry.py @@ -0,0 +1,137 @@ +"""Tests of the builder registry.""" + +import pytest +import warnings + +from bs4 import BeautifulSoup +from bs4.builder import ( + builder_registry as registry, + HTMLParserTreeBuilder, + TreeBuilderRegistry, +) + +from . import ( + HTML5LIB_PRESENT, + LXML_PRESENT, +) + +if HTML5LIB_PRESENT: + from bs4.builder import HTML5TreeBuilder + +if LXML_PRESENT: + from bs4.builder import ( + LXMLTreeBuilderForXML, + LXMLTreeBuilder, + ) + + +# TODO: Split out the lxml and html5lib tests into their own classes +# and gate with pytest.mark.skipIf. 
+class TestBuiltInRegistry(object):
+    """Test the built-in registry with the default builders registered."""
+
+    def test_combination(self):
+        assert registry.lookup('strict', 'html') == HTMLParserTreeBuilder
+        if LXML_PRESENT:
+            assert registry.lookup('fast', 'html') == LXMLTreeBuilder
+            assert registry.lookup('permissive', 'xml') == LXMLTreeBuilderForXML
+        if HTML5LIB_PRESENT:
+            assert registry.lookup('html5lib', 'html') == HTML5TreeBuilder
+
+    def test_lookup_by_markup_type(self):
+        if LXML_PRESENT:
+            assert registry.lookup('html') == LXMLTreeBuilder
+            assert registry.lookup('xml') == LXMLTreeBuilderForXML
+        else:
+            assert registry.lookup('xml') == None
+            if HTML5LIB_PRESENT:
+                assert registry.lookup('html') == HTML5TreeBuilder
+            else:
+                assert registry.lookup('html') == HTMLParserTreeBuilder
+
+    def test_named_library(self):
+        if LXML_PRESENT:
+            assert registry.lookup('lxml', 'xml') == LXMLTreeBuilderForXML
+            assert registry.lookup('lxml', 'html') == LXMLTreeBuilder
+        if HTML5LIB_PRESENT:
+            assert registry.lookup('html5lib') == HTML5TreeBuilder
+
+        assert registry.lookup('html.parser') == HTMLParserTreeBuilder
+
+    def test_beautifulsoup_constructor_does_lookup(self):
+
+        with warnings.catch_warnings(record=True) as w:
+            # This will create a warning about not explicitly
+            # specifying a parser, but we'll ignore it.
+
+            # You can pass in a string.
+            BeautifulSoup("", features="html")
+            # Or a list of strings.
+            BeautifulSoup("", features=["html", "fast"])
+            pass
+
+        # You'll get an exception if BS can't find an appropriate
+        # builder.
+        with pytest.raises(ValueError):
+            BeautifulSoup("", features="no-such-feature")
+
+class TestRegistry(object):
+    """Test the TreeBuilderRegistry class in general."""
+
+    def setup_method(self):
+        self.registry = TreeBuilderRegistry()
+
+    def builder_for_features(self, *feature_list):
+        cls = type('Builder_' + '_'.join(feature_list),
+                   (object,), {'features' : feature_list})
+
+        self.registry.register(cls)
+        return cls
+
+    def test_register_with_no_features(self):
+        builder = self.builder_for_features()
+
+        # Since the builder advertises no features, you can't find it
+        # by looking up features.
+        assert self.registry.lookup('foo') is None
+
+        # But you can find it by doing a lookup with no features, if
+        # this happens to be the only registered builder.
+        assert self.registry.lookup() == builder
+
+    def test_register_with_features_makes_lookup_succeed(self):
+        builder = self.builder_for_features('foo', 'bar')
+        assert self.registry.lookup('foo') is builder
+        assert self.registry.lookup('bar') is builder
+
+    def test_lookup_fails_when_no_builder_implements_feature(self):
+        builder = self.builder_for_features('foo', 'bar')
+        assert self.registry.lookup('baz') is None
+
+    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
+        builder1 = self.builder_for_features('foo')
+        builder2 = self.builder_for_features('bar')
+        assert self.registry.lookup() == builder2
+
+    def test_lookup_fails_when_no_tree_builders_registered(self):
+        assert self.registry.lookup() is None
+
+    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
+        has_one = self.builder_for_features('foo')
+        has_the_other = self.builder_for_features('bar')
+        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
+        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
+        lacks_one = self.builder_for_features('bar')
+        has_the_other = self.builder_for_features('foo')
+
+        # There are two builders featuring 'foo' and 'bar', but
+        # the one that also features 'quux' was registered later.
+        assert self.registry.lookup('foo', 'bar') == has_both_late
+
+        # There is only one builder featuring 'foo', 'bar', and 'baz'.
+        assert self.registry.lookup('foo', 'bar', 'baz') == has_both_early
+
+    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
+        builder1 = self.builder_for_features('foo', 'bar')
+        builder2 = self.builder_for_features('foo', 'baz')
+        assert self.registry.lookup('bar', 'baz') is None
diff --git a/lib/python3.11/site-packages/bs4/tests/test_css.py b/lib/python3.11/site-packages/bs4/tests/test_css.py
new file mode 100644
index 00000000..359dbcd2
--- /dev/null
+++ b/lib/python3.11/site-packages/bs4/tests/test_css.py
@@ -0,0 +1,487 @@
+import pytest
+import types
+from unittest.mock import MagicMock
+
+from bs4 import (
+    CSS,
+    BeautifulSoup,
+    ResultSet,
+)
+
+from . import (
+    SoupTest,
+    SOUP_SIEVE_PRESENT,
+)
+
+if SOUP_SIEVE_PRESENT:
+    from soupsieve import SelectorSyntaxError
+
+
+@pytest.mark.skipif(not SOUP_SIEVE_PRESENT, reason="Soup Sieve not installed")
+class TestCSSSelectors(SoupTest):
+    """Test basic CSS selector functionality.
+
+    This functionality is implemented in soupsieve, which has a much
+    more comprehensive test suite, so this is basically an extra check
+    that soupsieve works as expected.
+    """
+
+    HTML = """
+
+
+
+The title
+
+
+
+Hello there.
+
+
+

An H1

+

Some text

+

Some more text

+

An H2

+

Another

+Bob +

Another H2

+me + +span1a1 +span1a2 test + +span2a1 + + + +
+ +
+ + + + + + + + +

English

+

English UK

+

English US

+

French

+
+ + +""" + + def setup_method(self): + self.soup = BeautifulSoup(self.HTML, 'html.parser') + + def assert_selects(self, selector, expected_ids, **kwargs): + results = self.soup.select(selector, **kwargs) + assert isinstance(results, ResultSet) + el_ids = [el['id'] for el in results] + el_ids.sort() + expected_ids.sort() + assert expected_ids == el_ids, "Selector %s, expected [%s], got [%s]" % ( + selector, ', '.join(expected_ids), ', '.join(el_ids) + ) + + assertSelect = assert_selects + + def assert_select_multiple(self, *tests): + for selector, expected_ids in tests: + self.assert_selects(selector, expected_ids) + + def test_precompiled(self): + sel = self.soup.css.compile('div') + + els = self.soup.select(sel) + assert len(els) == 4 + for div in els: + assert div.name == 'div' + + el = self.soup.select_one(sel) + assert 'main' == el['id'] + + def test_one_tag_one(self): + els = self.soup.select('title') + assert len(els) == 1 + assert els[0].name == 'title' + assert els[0].contents == ['The title'] + + def test_one_tag_many(self): + els = self.soup.select('div') + assert len(els) == 4 + for div in els: + assert div.name == 'div' + + el = self.soup.select_one('div') + assert 'main' == el['id'] + + def test_select_one_returns_none_if_no_match(self): + match = self.soup.select_one('nonexistenttag') + assert None == match + + + def test_tag_in_tag_one(self): + els = self.soup.select('div div') + self.assert_selects('div div', ['inner', 'data1']) + + def test_tag_in_tag_many(self): + for selector in ('html div', 'html body div', 'body div'): + self.assert_selects(selector, ['data1', 'main', 'inner', 'footer']) + + + def test_limit(self): + self.assert_selects('html div', ['main'], limit=1) + self.assert_selects('html body div', ['inner', 'main'], limit=2) + self.assert_selects('body div', ['data1', 'main', 'inner', 'footer'], + limit=10) + + def test_tag_no_match(self): + assert len(self.soup.select('del')) == 0 + + def test_invalid_tag(self): + with 
pytest.raises(SelectorSyntaxError): + self.soup.select('tag%t') + + def test_select_dashed_tag_ids(self): + self.assert_selects('custom-dashed-tag', ['dash1', 'dash2']) + + def test_select_dashed_by_id(self): + dashed = self.soup.select('custom-dashed-tag[id=\"dash2\"]') + assert dashed[0].name == 'custom-dashed-tag' + assert dashed[0]['id'] == 'dash2' + + def test_dashed_tag_text(self): + assert self.soup.select('body > custom-dashed-tag')[0].text == 'Hello there.' + + def test_select_dashed_matches_find_all(self): + assert self.soup.select('custom-dashed-tag') == self.soup.find_all('custom-dashed-tag') + + def test_header_tags(self): + self.assert_select_multiple( + ('h1', ['header1']), + ('h2', ['header2', 'header3']), + ) + + def test_class_one(self): + for selector in ('.onep', 'p.onep', 'html p.onep'): + els = self.soup.select(selector) + assert len(els) == 1 + assert els[0].name == 'p' + assert els[0]['class'] == ['onep'] + + def test_class_mismatched_tag(self): + els = self.soup.select('div.onep') + assert len(els) == 0 + + def test_one_id(self): + for selector in ('div#inner', '#inner', 'div div#inner'): + self.assert_selects(selector, ['inner']) + + def test_bad_id(self): + els = self.soup.select('#doesnotexist') + assert len(els) == 0 + + def test_items_in_id(self): + els = self.soup.select('div#inner p') + assert len(els) == 3 + for el in els: + assert el.name == 'p' + assert els[1]['class'] == ['onep'] + assert not els[0].has_attr('class') + + def test_a_bunch_of_emptys(self): + for selector in ('div#main del', 'div#main div.oops', 'div div#main'): + assert len(self.soup.select(selector)) == 0 + + def test_multi_class_support(self): + for selector in ('.class1', 'p.class1', '.class2', 'p.class2', + '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'): + self.assert_selects(selector, ['pmulti']) + + def test_multi_class_selection(self): + for selector in ('.class1.class3', '.class3.class2', + '.class1.class2.class3'): + 
self.assert_selects(selector, ['pmulti']) + + def test_child_selector(self): + self.assert_selects('.s1 > a', ['s1a1', 's1a2']) + self.assert_selects('.s1 > a span', ['s1a2s1']) + + def test_child_selector_id(self): + self.assert_selects('.s1 > a#s1a2 span', ['s1a2s1']) + + def test_attribute_equals(self): + self.assert_select_multiple( + ('p[class="onep"]', ['p1']), + ('p[id="p1"]', ['p1']), + ('[class="onep"]', ['p1']), + ('[id="p1"]', ['p1']), + ('link[rel="stylesheet"]', ['l1']), + ('link[type="text/css"]', ['l1']), + ('link[href="blah.css"]', ['l1']), + ('link[href="no-blah.css"]', []), + ('[rel="stylesheet"]', ['l1']), + ('[type="text/css"]', ['l1']), + ('[href="blah.css"]', ['l1']), + ('[href="no-blah.css"]', []), + ('p[href="no-blah.css"]', []), + ('[href="no-blah.css"]', []), + ) + + def test_attribute_tilde(self): + self.assert_select_multiple( + ('p[class~="class1"]', ['pmulti']), + ('p[class~="class2"]', ['pmulti']), + ('p[class~="class3"]', ['pmulti']), + ('[class~="class1"]', ['pmulti']), + ('[class~="class2"]', ['pmulti']), + ('[class~="class3"]', ['pmulti']), + ('a[rel~="friend"]', ['bob']), + ('a[rel~="met"]', ['bob']), + ('[rel~="friend"]', ['bob']), + ('[rel~="met"]', ['bob']), + ) + + def test_attribute_startswith(self): + self.assert_select_multiple( + ('[rel^="style"]', ['l1']), + ('link[rel^="style"]', ['l1']), + ('notlink[rel^="notstyle"]', []), + ('[rel^="notstyle"]', []), + ('link[rel^="notstyle"]', []), + ('link[href^="bla"]', ['l1']), + ('a[href^="http://"]', ['bob', 'me']), + ('[href^="http://"]', ['bob', 'me']), + ('[id^="p"]', ['pmulti', 'p1']), + ('[id^="m"]', ['me', 'main']), + ('div[id^="m"]', ['main']), + ('a[id^="m"]', ['me']), + ('div[data-tag^="dashed"]', ['data1']) + ) + + def test_attribute_endswith(self): + self.assert_select_multiple( + ('[href$=".css"]', ['l1']), + ('link[href$=".css"]', ['l1']), + ('link[id$="1"]', ['l1']), + ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']), + 
('div[id$="1"]', ['data1']), + ('[id$="noending"]', []), + ) + + def test_attribute_contains(self): + self.assert_select_multiple( + # From test_attribute_startswith + ('[rel*="style"]', ['l1']), + ('link[rel*="style"]', ['l1']), + ('notlink[rel*="notstyle"]', []), + ('[rel*="notstyle"]', []), + ('link[rel*="notstyle"]', []), + ('link[href*="bla"]', ['l1']), + ('[href*="http://"]', ['bob', 'me']), + ('[id*="p"]', ['pmulti', 'p1']), + ('div[id*="m"]', ['main']), + ('a[id*="m"]', ['me']), + # From test_attribute_endswith + ('[href*=".css"]', ['l1']), + ('link[href*=".css"]', ['l1']), + ('link[id*="1"]', ['l1']), + ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']), + ('div[id*="1"]', ['data1']), + ('[id*="noending"]', []), + # New for this test + ('[href*="."]', ['bob', 'me', 'l1']), + ('a[href*="."]', ['bob', 'me']), + ('link[href*="."]', ['l1']), + ('div[id*="n"]', ['main', 'inner']), + ('div[id*="nn"]', ['inner']), + ('div[data-tag*="edval"]', ['data1']) + ) + + def test_attribute_exact_or_hypen(self): + self.assert_select_multiple( + ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), + ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), + ('p[lang|="fr"]', ['lang-fr']), + ('p[lang|="gb"]', []), + ) + + def test_attribute_exists(self): + self.assert_select_multiple( + ('[rel]', ['l1', 'bob', 'me']), + ('link[rel]', ['l1']), + ('a[rel]', ['bob', 'me']), + ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']), + ('p[class]', ['p1', 'pmulti']), + ('[blah]', []), + ('p[blah]', []), + ('div[data-tag]', ['data1']) + ) + + def test_quoted_space_in_selector_name(self): + html = """
nope
+
yes
+ """ + soup = BeautifulSoup(html, 'html.parser') + [chosen] = soup.select('div[style="display: right"]') + assert "yes" == chosen.string + + def test_unsupported_pseudoclass(self): + with pytest.raises(NotImplementedError): + self.soup.select("a:no-such-pseudoclass") + + with pytest.raises(SelectorSyntaxError): + self.soup.select("a:nth-of-type(a)") + + def test_nth_of_type(self): + # Try to select first paragraph + els = self.soup.select('div#inner p:nth-of-type(1)') + assert len(els) == 1 + assert els[0].string == 'Some text' + + # Try to select third paragraph + els = self.soup.select('div#inner p:nth-of-type(3)') + assert len(els) == 1 + assert els[0].string == 'Another' + + # Try to select (non-existent!) fourth paragraph + els = self.soup.select('div#inner p:nth-of-type(4)') + assert len(els) == 0 + + # Zero will select no tags. + els = self.soup.select('div p:nth-of-type(0)') + assert len(els) == 0 + + def test_nth_of_type_direct_descendant(self): + els = self.soup.select('div#inner > p:nth-of-type(1)') + assert len(els) == 1 + assert els[0].string == 'Some text' + + def test_id_child_selector_nth_of_type(self): + self.assert_selects('#inner > p:nth-of-type(2)', ['p1']) + + def test_select_on_element(self): + # Other tests operate on the tree; this operates on an element + # within the tree. + inner = self.soup.find("div", id="main") + selected = inner.select("div") + # The
tag was selected. The