Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- meow/lib/python3.13/site-packages/certifi/__init__.py +4 -0
- meow/lib/python3.13/site-packages/certifi/__main__.py +12 -0
- meow/lib/python3.13/site-packages/certifi/cacert.pem +0 -0
- meow/lib/python3.13/site-packages/certifi/core.py +114 -0
- meow/lib/python3.13/site-packages/certifi/py.typed +0 -0
- meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER +1 -0
- meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE +21 -0
- meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/METADATA +721 -0
- meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/RECORD +35 -0
- meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL +5 -0
- meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt +2 -0
- meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt +1 -0
- meow/lib/python3.13/site-packages/charset_normalizer/__main__.py +6 -0
- meow/lib/python3.13/site-packages/charset_normalizer/cd.py +395 -0
- meow/lib/python3.13/site-packages/charset_normalizer/constant.py +1998 -0
- meow/lib/python3.13/site-packages/charset_normalizer/md.cpython-313-darwin.so +0 -0
- meow/lib/python3.13/site-packages/charset_normalizer/md__mypyc.cpython-313-darwin.so +0 -0
- meow/lib/python3.13/site-packages/charset_normalizer/py.typed +0 -0
- meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/INSTALLER +1 -0
- meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/METADATA +279 -0
- meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/RECORD +113 -0
- meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/WHEEL +4 -0
- meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/LICENSE +201 -0
- meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/RECORD +229 -0
- meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/REQUESTED +0 -0
- meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/WHEEL +5 -0
- meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/entry_points.txt +6 -0
- meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/top_level.txt +1 -0
- meow/lib/python3.13/site-packages/idna/codec.py +122 -0
- meow/lib/python3.13/site-packages/idna/uts46data.py +0 -0
- meow/lib/python3.13/site-packages/packaging-24.2.dist-info/INSTALLER +1 -0
- meow/lib/python3.13/site-packages/packaging-24.2.dist-info/LICENSE +3 -0
- meow/lib/python3.13/site-packages/packaging-24.2.dist-info/LICENSE.APACHE +177 -0
- meow/lib/python3.13/site-packages/packaging-24.2.dist-info/LICENSE.BSD +23 -0
- meow/lib/python3.13/site-packages/packaging-24.2.dist-info/METADATA +102 -0
- meow/lib/python3.13/site-packages/packaging-24.2.dist-info/RECORD +40 -0
- meow/lib/python3.13/site-packages/packaging-24.2.dist-info/WHEEL +4 -0
- meow/lib/python3.13/site-packages/pip/__init__.py +13 -0
- meow/lib/python3.13/site-packages/pip/__main__.py +24 -0
- meow/lib/python3.13/site-packages/pip/__pip-runner__.py +50 -0
- meow/lib/python3.13/site-packages/pip/py.typed +4 -0
- meow/lib/python3.13/site-packages/requests/__init__.py +184 -0
- meow/lib/python3.13/site-packages/requests/adapters.py +719 -0
- meow/lib/python3.13/site-packages/requests/api.py +157 -0
- meow/lib/python3.13/site-packages/requests/auth.py +314 -0
- meow/lib/python3.13/site-packages/requests/certs.py +17 -0
- meow/lib/python3.13/site-packages/requests/compat.py +94 -0
- meow/lib/python3.13/site-packages/requests/cookies.py +561 -0
- meow/lib/python3.13/site-packages/requests/exceptions.py +151 -0
- meow/lib/python3.13/site-packages/requests/help.py +134 -0
meow/lib/python3.13/site-packages/certifi/__init__.py
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from .core import contents, where
|
2 |
+
|
3 |
+
__all__ = ["contents", "where"]
|
4 |
+
__version__ = "2024.12.14"
|
meow/lib/python3.13/site-packages/certifi/__main__.py
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import argparse
|
2 |
+
|
3 |
+
from certifi import contents, where
|
4 |
+
|
5 |
+
parser = argparse.ArgumentParser()
|
6 |
+
parser.add_argument("-c", "--contents", action="store_true")
|
7 |
+
args = parser.parse_args()
|
8 |
+
|
9 |
+
if args.contents:
|
10 |
+
print(contents())
|
11 |
+
else:
|
12 |
+
print(where())
|
meow/lib/python3.13/site-packages/certifi/cacert.pem
ADDED
The diff for this file is too large to render.
See raw diff
|
|
meow/lib/python3.13/site-packages/certifi/core.py
ADDED
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
certifi.py
|
3 |
+
~~~~~~~~~~
|
4 |
+
|
5 |
+
This module returns the installation location of cacert.pem or its contents.
|
6 |
+
"""
|
7 |
+
import sys
|
8 |
+
import atexit
|
9 |
+
|
10 |
+
def exit_cacert_ctx() -> None:
|
11 |
+
_CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
|
12 |
+
|
13 |
+
|
14 |
+
if sys.version_info >= (3, 11):
|
15 |
+
|
16 |
+
from importlib.resources import as_file, files
|
17 |
+
|
18 |
+
_CACERT_CTX = None
|
19 |
+
_CACERT_PATH = None
|
20 |
+
|
21 |
+
def where() -> str:
|
22 |
+
# This is slightly terrible, but we want to delay extracting the file
|
23 |
+
# in cases where we're inside of a zipimport situation until someone
|
24 |
+
# actually calls where(), but we don't want to re-extract the file
|
25 |
+
# on every call of where(), so we'll do it once then store it in a
|
26 |
+
# global variable.
|
27 |
+
global _CACERT_CTX
|
28 |
+
global _CACERT_PATH
|
29 |
+
if _CACERT_PATH is None:
|
30 |
+
# This is slightly janky, the importlib.resources API wants you to
|
31 |
+
# manage the cleanup of this file, so it doesn't actually return a
|
32 |
+
# path, it returns a context manager that will give you the path
|
33 |
+
# when you enter it and will do any cleanup when you leave it. In
|
34 |
+
# the common case of not needing a temporary file, it will just
|
35 |
+
# return the file system location and the __exit__() is a no-op.
|
36 |
+
#
|
37 |
+
# We also have to hold onto the actual context manager, because
|
38 |
+
# it will do the cleanup whenever it gets garbage collected, so
|
39 |
+
# we will also store that at the global level as well.
|
40 |
+
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
|
41 |
+
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
42 |
+
atexit.register(exit_cacert_ctx)
|
43 |
+
|
44 |
+
return _CACERT_PATH
|
45 |
+
|
46 |
+
def contents() -> str:
|
47 |
+
return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
|
48 |
+
|
49 |
+
elif sys.version_info >= (3, 7):
|
50 |
+
|
51 |
+
from importlib.resources import path as get_path, read_text
|
52 |
+
|
53 |
+
_CACERT_CTX = None
|
54 |
+
_CACERT_PATH = None
|
55 |
+
|
56 |
+
def where() -> str:
|
57 |
+
# This is slightly terrible, but we want to delay extracting the
|
58 |
+
# file in cases where we're inside of a zipimport situation until
|
59 |
+
# someone actually calls where(), but we don't want to re-extract
|
60 |
+
# the file on every call of where(), so we'll do it once then store
|
61 |
+
# it in a global variable.
|
62 |
+
global _CACERT_CTX
|
63 |
+
global _CACERT_PATH
|
64 |
+
if _CACERT_PATH is None:
|
65 |
+
# This is slightly janky, the importlib.resources API wants you
|
66 |
+
# to manage the cleanup of this file, so it doesn't actually
|
67 |
+
# return a path, it returns a context manager that will give
|
68 |
+
# you the path when you enter it and will do any cleanup when
|
69 |
+
# you leave it. In the common case of not needing a temporary
|
70 |
+
# file, it will just return the file system location and the
|
71 |
+
# __exit__() is a no-op.
|
72 |
+
#
|
73 |
+
# We also have to hold onto the actual context manager, because
|
74 |
+
# it will do the cleanup whenever it gets garbage collected, so
|
75 |
+
# we will also store that at the global level as well.
|
76 |
+
_CACERT_CTX = get_path("certifi", "cacert.pem")
|
77 |
+
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
78 |
+
atexit.register(exit_cacert_ctx)
|
79 |
+
|
80 |
+
return _CACERT_PATH
|
81 |
+
|
82 |
+
def contents() -> str:
|
83 |
+
return read_text("certifi", "cacert.pem", encoding="ascii")
|
84 |
+
|
85 |
+
else:
|
86 |
+
import os
|
87 |
+
import types
|
88 |
+
from typing import Union
|
89 |
+
|
90 |
+
Package = Union[types.ModuleType, str]
|
91 |
+
Resource = Union[str, "os.PathLike"]
|
92 |
+
|
93 |
+
# This fallback will work for Python versions prior to 3.7 that lack the
|
94 |
+
# importlib.resources module but relies on the existing `where` function
|
95 |
+
# so won't address issues with environments like PyOxidizer that don't set
|
96 |
+
# __file__ on modules.
|
97 |
+
def read_text(
|
98 |
+
package: Package,
|
99 |
+
resource: Resource,
|
100 |
+
encoding: str = 'utf-8',
|
101 |
+
errors: str = 'strict'
|
102 |
+
) -> str:
|
103 |
+
with open(where(), encoding=encoding) as data:
|
104 |
+
return data.read()
|
105 |
+
|
106 |
+
# If we don't have importlib.resources, then we will just do the old logic
|
107 |
+
# of assuming we're on the filesystem and munge the path directly.
|
108 |
+
def where() -> str:
|
109 |
+
f = os.path.dirname(__file__)
|
110 |
+
|
111 |
+
return os.path.join(f, "cacert.pem")
|
112 |
+
|
113 |
+
def contents() -> str:
|
114 |
+
return read_text("certifi", "cacert.pem", encoding="ascii")
|
meow/lib/python3.13/site-packages/certifi/py.typed
ADDED
File without changes
|
meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
pip
|
meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
MIT License
|
2 |
+
|
3 |
+
Copyright (c) 2025 TAHRI Ahmed R.
|
4 |
+
|
5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6 |
+
of this software and associated documentation files (the "Software"), to deal
|
7 |
+
in the Software without restriction, including without limitation the rights
|
8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9 |
+
copies of the Software, and to permit persons to whom the Software is
|
10 |
+
furnished to do so, subject to the following conditions:
|
11 |
+
|
12 |
+
The above copyright notice and this permission notice shall be included in all
|
13 |
+
copies or substantial portions of the Software.
|
14 |
+
|
15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
21 |
+
SOFTWARE.
|
meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/METADATA
ADDED
@@ -0,0 +1,721 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Metadata-Version: 2.1
|
2 |
+
Name: charset-normalizer
|
3 |
+
Version: 3.4.1
|
4 |
+
Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
|
5 |
+
Author-email: "Ahmed R. TAHRI" <[email protected]>
|
6 |
+
Maintainer-email: "Ahmed R. TAHRI" <[email protected]>
|
7 |
+
License: MIT
|
8 |
+
Project-URL: Changelog, https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md
|
9 |
+
Project-URL: Documentation, https://charset-normalizer.readthedocs.io/
|
10 |
+
Project-URL: Code, https://github.com/jawah/charset_normalizer
|
11 |
+
Project-URL: Issue tracker, https://github.com/jawah/charset_normalizer/issues
|
12 |
+
Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
|
13 |
+
Classifier: Development Status :: 5 - Production/Stable
|
14 |
+
Classifier: Intended Audience :: Developers
|
15 |
+
Classifier: License :: OSI Approved :: MIT License
|
16 |
+
Classifier: Operating System :: OS Independent
|
17 |
+
Classifier: Programming Language :: Python
|
18 |
+
Classifier: Programming Language :: Python :: 3
|
19 |
+
Classifier: Programming Language :: Python :: 3.7
|
20 |
+
Classifier: Programming Language :: Python :: 3.8
|
21 |
+
Classifier: Programming Language :: Python :: 3.9
|
22 |
+
Classifier: Programming Language :: Python :: 3.10
|
23 |
+
Classifier: Programming Language :: Python :: 3.11
|
24 |
+
Classifier: Programming Language :: Python :: 3.12
|
25 |
+
Classifier: Programming Language :: Python :: 3.13
|
26 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
27 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
28 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
29 |
+
Classifier: Topic :: Text Processing :: Linguistic
|
30 |
+
Classifier: Topic :: Utilities
|
31 |
+
Classifier: Typing :: Typed
|
32 |
+
Requires-Python: >=3.7
|
33 |
+
Description-Content-Type: text/markdown
|
34 |
+
License-File: LICENSE
|
35 |
+
Provides-Extra: unicode-backport
|
36 |
+
|
37 |
+
<h1 align="center">Charset Detection, for Everyone 👋</h1>
|
38 |
+
|
39 |
+
<p align="center">
|
40 |
+
<sup>The Real First Universal Charset Detector</sup><br>
|
41 |
+
<a href="https://pypi.org/project/charset-normalizer">
|
42 |
+
<img src="https://img.shields.io/pypi/pyversions/charset_normalizer.svg?orange=blue" />
|
43 |
+
</a>
|
44 |
+
<a href="https://pepy.tech/project/charset-normalizer/">
|
45 |
+
<img alt="Download Count Total" src="https://static.pepy.tech/badge/charset-normalizer/month" />
|
46 |
+
</a>
|
47 |
+
<a href="https://bestpractices.coreinfrastructure.org/projects/7297">
|
48 |
+
<img src="https://bestpractices.coreinfrastructure.org/projects/7297/badge">
|
49 |
+
</a>
|
50 |
+
</p>
|
51 |
+
<p align="center">
|
52 |
+
<sup><i>Featured Packages</i></sup><br>
|
53 |
+
<a href="https://github.com/jawah/niquests">
|
54 |
+
<img alt="Static Badge" src="https://img.shields.io/badge/Niquests-Best_HTTP_Client-cyan">
|
55 |
+
</a>
|
56 |
+
<a href="https://github.com/jawah/wassima">
|
57 |
+
<img alt="Static Badge" src="https://img.shields.io/badge/Wassima-Certifi_Killer-cyan">
|
58 |
+
</a>
|
59 |
+
</p>
|
60 |
+
<p align="center">
|
61 |
+
<sup><i>In other language (unofficial port - by the community)</i></sup><br>
|
62 |
+
<a href="https://github.com/nickspring/charset-normalizer-rs">
|
63 |
+
<img alt="Static Badge" src="https://img.shields.io/badge/Rust-red">
|
64 |
+
</a>
|
65 |
+
</p>
|
66 |
+
|
67 |
+
> A library that helps you read text from an unknown charset encoding.<br /> Motivated by `chardet`,
|
68 |
+
> I'm trying to resolve the issue by taking a new approach.
|
69 |
+
> All IANA character set names for which the Python core library provides codecs are supported.
|
70 |
+
|
71 |
+
<p align="center">
|
72 |
+
>>>>> <a href="https://charsetnormalizerweb.ousret.now.sh" target="_blank">👉 Try Me Online Now, Then Adopt Me 👈 </a> <<<<<
|
73 |
+
</p>
|
74 |
+
|
75 |
+
This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
|
76 |
+
|
77 |
+
| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
|
78 |
+
|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
|
79 |
+
| `Fast` | ❌ | ✅ | ✅ |
|
80 |
+
| `Universal**` | ❌ | ✅ | ❌ |
|
81 |
+
| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
|
82 |
+
| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
|
83 |
+
| `License` | LGPL-2.1<br>_restrictive_ | MIT | MPL-1.1<br>_restrictive_ |
|
84 |
+
| `Native Python` | ✅ | ✅ | ❌ |
|
85 |
+
| `Detect spoken language` | ❌ | ✅ | N/A |
|
86 |
+
| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
|
87 |
+
| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
|
88 |
+
| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
|
89 |
+
|
90 |
+
<p align="center">
|
91 |
+
<img src="https://i.imgflip.com/373iay.gif" alt="Reading Normalized Text" width="226"/><img src="https://media.tenor.com/images/c0180f70732a18b4965448d33adba3d0/tenor.gif" alt="Cat Reading Text" width="200"/>
|
92 |
+
</p>
|
93 |
+
|
94 |
+
*\*\* : They are clearly using specific code for a specific encoding even if covering most of used one*<br>
|
95 |
+
|
96 |
+
## ⚡ Performance
|
97 |
+
|
98 |
+
This package offer better performance than its counterpart Chardet. Here are some numbers.
|
99 |
+
|
100 |
+
| Package | Accuracy | Mean per file (ms) | File per sec (est) |
|
101 |
+
|-----------------------------------------------|:--------:|:------------------:|:------------------:|
|
102 |
+
| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |
|
103 |
+
| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
|
104 |
+
|
105 |
+
| Package | 99th percentile | 95th percentile | 50th percentile |
|
106 |
+
|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
|
107 |
+
| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |
|
108 |
+
| charset-normalizer | 100 ms | 50 ms | 5 ms |
|
109 |
+
|
110 |
+
_updated as of december 2024 using CPython 3.12_
|
111 |
+
|
112 |
+
Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.
|
113 |
+
|
114 |
+
> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.
|
115 |
+
> And yes, these results might change at any time. The dataset can be updated to include more files.
|
116 |
+
> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.
|
117 |
+
> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability
|
118 |
+
> (e.g. Supported Encoding) Challenge-them if you want.
|
119 |
+
|
120 |
+
## ✨ Installation
|
121 |
+
|
122 |
+
Using pip:
|
123 |
+
|
124 |
+
```sh
|
125 |
+
pip install charset-normalizer -U
|
126 |
+
```
|
127 |
+
|
128 |
+
## 🚀 Basic Usage
|
129 |
+
|
130 |
+
### CLI
|
131 |
+
This package comes with a CLI.
|
132 |
+
|
133 |
+
```
|
134 |
+
usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
|
135 |
+
file [file ...]
|
136 |
+
|
137 |
+
The Real First Universal Charset Detector. Discover originating encoding used
|
138 |
+
on text file. Normalize text to unicode.
|
139 |
+
|
140 |
+
positional arguments:
|
141 |
+
files File(s) to be analysed
|
142 |
+
|
143 |
+
optional arguments:
|
144 |
+
-h, --help show this help message and exit
|
145 |
+
-v, --verbose Display complementary information about file if any.
|
146 |
+
Stdout will contain logs about the detection process.
|
147 |
+
-a, --with-alternative
|
148 |
+
Output complementary possibilities if any. Top-level
|
149 |
+
JSON WILL be a list.
|
150 |
+
-n, --normalize Permit to normalize input file. If not set, program
|
151 |
+
does not write anything.
|
152 |
+
-m, --minimal Only output the charset detected to STDOUT. Disabling
|
153 |
+
JSON output.
|
154 |
+
-r, --replace Replace file when trying to normalize it instead of
|
155 |
+
creating a new one.
|
156 |
+
-f, --force Replace file without asking if you are sure, use this
|
157 |
+
flag with caution.
|
158 |
+
-t THRESHOLD, --threshold THRESHOLD
|
159 |
+
Define a custom maximum amount of chaos allowed in
|
160 |
+
decoded content. 0. <= chaos <= 1.
|
161 |
+
--version Show version information and exit.
|
162 |
+
```
|
163 |
+
|
164 |
+
```bash
|
165 |
+
normalizer ./data/sample.1.fr.srt
|
166 |
+
```
|
167 |
+
|
168 |
+
or
|
169 |
+
|
170 |
+
```bash
|
171 |
+
python -m charset_normalizer ./data/sample.1.fr.srt
|
172 |
+
```
|
173 |
+
|
174 |
+
🎉 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.
|
175 |
+
|
176 |
+
```json
|
177 |
+
{
|
178 |
+
"path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
|
179 |
+
"encoding": "cp1252",
|
180 |
+
"encoding_aliases": [
|
181 |
+
"1252",
|
182 |
+
"windows_1252"
|
183 |
+
],
|
184 |
+
"alternative_encodings": [
|
185 |
+
"cp1254",
|
186 |
+
"cp1256",
|
187 |
+
"cp1258",
|
188 |
+
"iso8859_14",
|
189 |
+
"iso8859_15",
|
190 |
+
"iso8859_16",
|
191 |
+
"iso8859_3",
|
192 |
+
"iso8859_9",
|
193 |
+
"latin_1",
|
194 |
+
"mbcs"
|
195 |
+
],
|
196 |
+
"language": "French",
|
197 |
+
"alphabets": [
|
198 |
+
"Basic Latin",
|
199 |
+
"Latin-1 Supplement"
|
200 |
+
],
|
201 |
+
"has_sig_or_bom": false,
|
202 |
+
"chaos": 0.149,
|
203 |
+
"coherence": 97.152,
|
204 |
+
"unicode_path": null,
|
205 |
+
"is_preferred": true
|
206 |
+
}
|
207 |
+
```
|
208 |
+
|
209 |
+
### Python
|
210 |
+
*Just print out normalized text*
|
211 |
+
```python
|
212 |
+
from charset_normalizer import from_path
|
213 |
+
|
214 |
+
results = from_path('./my_subtitle.srt')
|
215 |
+
|
216 |
+
print(str(results.best()))
|
217 |
+
```
|
218 |
+
|
219 |
+
*Upgrade your code without effort*
|
220 |
+
```python
|
221 |
+
from charset_normalizer import detect
|
222 |
+
```
|
223 |
+
|
224 |
+
The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
|
225 |
+
|
226 |
+
See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
|
227 |
+
|
228 |
+
## 😇 Why
|
229 |
+
|
230 |
+
When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
|
231 |
+
reliable alternative using a completely different method. Also! I never back down on a good challenge!
|
232 |
+
|
233 |
+
I **don't care** about the **originating charset** encoding, because **two different tables** can
|
234 |
+
produce **two identical rendered string.**
|
235 |
+
What I want is to get readable text, the best I can.
|
236 |
+
|
237 |
+
In a way, **I'm brute forcing text decoding.** How cool is that ? 😎
|
238 |
+
|
239 |
+
Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.
|
240 |
+
|
241 |
+
## 🍰 How
|
242 |
+
|
243 |
+
- Discard all charset encoding table that could not fit the binary content.
|
244 |
+
- Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.
|
245 |
+
- Extract matches with the lowest mess detected.
|
246 |
+
- Additionally, we measure coherence / probe for a language.
|
247 |
+
|
248 |
+
**Wait a minute**, what is noise/mess and coherence according to **YOU ?**
|
249 |
+
|
250 |
+
*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then
|
251 |
+
**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).
|
252 |
+
I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to
|
253 |
+
improve or rewrite it.
|
254 |
+
|
255 |
+
*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought
|
256 |
+
that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.
|
257 |
+
|
258 |
+
## ⚡ Known limitations
|
259 |
+
|
260 |
+
- Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))
|
261 |
+
- Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content.
|
262 |
+
|
263 |
+
## ⚠️ About Python EOLs
|
264 |
+
|
265 |
+
**If you are running:**
|
266 |
+
|
267 |
+
- Python >=2.7,<3.5: Unsupported
|
268 |
+
- Python 3.5: charset-normalizer < 2.1
|
269 |
+
- Python 3.6: charset-normalizer < 3.1
|
270 |
+
- Python 3.7: charset-normalizer < 4.0
|
271 |
+
|
272 |
+
Upgrade your Python interpreter as soon as possible.
|
273 |
+
|
274 |
+
## 👤 Contributing
|
275 |
+
|
276 |
+
Contributions, issues and feature requests are very much welcome.<br />
|
277 |
+
Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
|
278 |
+
|
279 |
+
## 📝 License
|
280 |
+
|
281 |
+
Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).<br />
|
282 |
+
This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
|
283 |
+
|
284 |
+
Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
|
285 |
+
|
286 |
+
## 💼 For Enterprise
|
287 |
+
|
288 |
+
Professional support for charset-normalizer is available as part of the [Tidelift
|
289 |
+
Subscription][1]. Tidelift gives software development teams a single source for
|
290 |
+
purchasing and maintaining their software, with professional grade assurances
|
291 |
+
from the experts who know it best, while seamlessly integrating with existing
|
292 |
+
tools.
|
293 |
+
|
294 |
+
[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
|
295 |
+
|
296 |
+
[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297)
|
297 |
+
|
298 |
+
# Changelog
|
299 |
+
All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
300 |
+
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
301 |
+
|
302 |
+
## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)
|
303 |
+
|
304 |
+
### Changed
|
305 |
+
- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.
|
306 |
+
- Enforce delayed annotation loading for simpler and more consistent types in the project.
|
307 |
+
- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8
|
308 |
+
|
309 |
+
### Added
|
310 |
+
- pre-commit configuration.
|
311 |
+
- noxfile.
|
312 |
+
|
313 |
+
### Removed
|
314 |
+
- `build-requirements.txt` as per using `pyproject.toml` native build configuration.
|
315 |
+
- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).
|
316 |
+
- `setup.cfg` in favor of `pyproject.toml` metadata configuration.
|
317 |
+
- Unused `utils.range_scan` function.
|
318 |
+
|
319 |
+
### Fixed
|
320 |
+
- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572)
|
321 |
+
- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+
|
322 |
+
|
323 |
+
## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)
|
324 |
+
|
325 |
+
### Added
|
326 |
+
- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.
|
327 |
+
- Support for Python 3.13 (#512)
|
328 |
+
|
329 |
+
### Fixed
|
330 |
+
- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.
|
331 |
+
- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)
|
332 |
+
- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)
|
333 |
+
|
334 |
+
## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)
|
335 |
+
|
336 |
+
### Fixed
|
337 |
+
- Unintentional memory usage regression when using large payload that match several encoding (#376)
|
338 |
+
- Regression on some detection case showcased in the documentation (#371)
|
339 |
+
|
340 |
+
### Added
|
341 |
+
- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)
|
342 |
+
|
343 |
+
## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)
|
344 |
+
|
345 |
+
### Changed
|
346 |
+
- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8
|
347 |
+
- Improved the general detection reliability based on reports from the community
|
348 |
+
|
349 |
+
## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)
|
350 |
+
|
351 |
+
### Added
|
352 |
+
- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`
|
353 |
+
- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)
|
354 |
+
|
355 |
+
### Removed
|
356 |
+
- (internal) Redundant utils.is_ascii function and unused function is_private_use_only
|
357 |
+
- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant
|
358 |
+
|
359 |
+
### Changed
|
360 |
+
- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection
|
361 |
+
- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8
|
362 |
+
|
363 |
+
### Fixed
|
364 |
+
- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350)
|
365 |
+
|
366 |
+
## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)
|
367 |
+
|
368 |
+
### Changed
|
369 |
+
- Typehint for function `from_path` no longer enforce `PathLike` as its first argument
|
370 |
+
- Minor improvement over the global detection reliability
|
371 |
+
|
372 |
+
### Added
|
373 |
+
- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries
|
374 |
+
- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)
|
375 |
+
- Explicit support for Python 3.12
|
376 |
+
|
377 |
+
### Fixed
|
378 |
+
- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)
|
379 |
+
|
380 |
+
## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
|
381 |
+
|
382 |
+
### Added
|
383 |
+
- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)
|
384 |
+
|
385 |
+
### Removed
|
386 |
+
- Support for Python 3.6 (PR #260)
|
387 |
+
|
388 |
+
### Changed
|
389 |
+
- Optional speedup provided by mypy/c 1.0.1
|
390 |
+
|
391 |
+
## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
|
392 |
+
|
393 |
+
### Fixed
|
394 |
+
- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)
|
395 |
+
|
396 |
+
### Changed
|
397 |
+
- Speedup provided by mypy/c 0.990 on Python >= 3.7
|
398 |
+
|
399 |
+
## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
|
400 |
+
|
401 |
+
### Added
|
402 |
+
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
|
403 |
+
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
|
404 |
+
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
|
405 |
+
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
|
406 |
+
|
407 |
+
### Changed
|
408 |
+
- Build with static metadata using 'build' frontend
|
409 |
+
- Make the language detection stricter
|
410 |
+
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
|
411 |
+
|
412 |
+
### Fixed
|
413 |
+
- CLI with opt --normalize fail when using full path for files
|
414 |
+
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
|
415 |
+
- Sphinx warnings when generating the documentation
|
416 |
+
|
417 |
+
### Removed
|
418 |
+
- Coherence detector no longer return 'Simple English' instead return 'English'
|
419 |
+
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
|
420 |
+
- Breaking: Method `first()` and `best()` from CharsetMatch
|
421 |
+
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
|
422 |
+
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
|
423 |
+
- Breaking: Top-level function `normalize`
|
424 |
+
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
|
425 |
+
- Support for the backport `unicodedata2`
|
426 |
+
|
427 |
+
## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
|
428 |
+
|
429 |
+
### Added
|
430 |
+
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
|
431 |
+
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
|
432 |
+
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
|
433 |
+
|
434 |
+
### Changed
|
435 |
+
- Build with static metadata using 'build' frontend
|
436 |
+
- Make the language detection stricter
|
437 |
+
|
438 |
+
### Fixed
|
439 |
+
- CLI with opt --normalize fail when using full path for files
|
440 |
+
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
|
441 |
+
|
442 |
+
### Removed
|
443 |
+
- Coherence detector no longer return 'Simple English' instead return 'English'
|
444 |
+
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
|
445 |
+
|
446 |
+
## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
|
447 |
+
|
448 |
+
### Added
|
449 |
+
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
|
450 |
+
|
451 |
+
### Removed
|
452 |
+
- Breaking: Method `first()` and `best()` from CharsetMatch
|
453 |
+
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
|
454 |
+
|
455 |
+
### Fixed
|
456 |
+
- Sphinx warnings when generating the documentation
|
457 |
+
|
458 |
+
## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
|
459 |
+
|
460 |
+
### Changed
|
461 |
+
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
|
462 |
+
|
463 |
+
### Removed
|
464 |
+
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
|
465 |
+
- Breaking: Top-level function `normalize`
|
466 |
+
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
|
467 |
+
- Support for the backport `unicodedata2`
|
468 |
+
|
469 |
+
## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
|
470 |
+
|
471 |
+
### Deprecated
|
472 |
+
- Function `normalize` scheduled for removal in 3.0
|
473 |
+
|
474 |
+
### Changed
|
475 |
+
- Removed useless call to decode in fn is_unprintable (#206)
|
476 |
+
|
477 |
+
### Fixed
|
478 |
+
- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
|
479 |
+
|
480 |
+
## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
|
481 |
+
|
482 |
+
### Added
|
483 |
+
- Output the Unicode table version when running the CLI with `--version` (PR #194)
|
484 |
+
|
485 |
+
### Changed
|
486 |
+
- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
|
487 |
+
- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
|
488 |
+
|
489 |
+
### Fixed
|
490 |
+
- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)
|
491 |
+
- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
|
492 |
+
|
493 |
+
### Removed
|
494 |
+
- Support for Python 3.5 (PR #192)
|
495 |
+
|
496 |
+
### Deprecated
|
497 |
+
- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
|
498 |
+
|
499 |
+
## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
|
500 |
+
|
501 |
+
### Fixed
|
502 |
+
- ASCII miss-detection on rare cases (PR #170)
|
503 |
+
|
504 |
+
## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
|
505 |
+
|
506 |
+
### Added
|
507 |
+
- Explicit support for Python 3.11 (PR #164)
|
508 |
+
|
509 |
+
### Changed
|
510 |
+
- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
|
511 |
+
|
512 |
+
## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
|
513 |
+
|
514 |
+
### Fixed
|
515 |
+
- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)
|
516 |
+
|
517 |
+
### Changed
|
518 |
+
- Skipping the language-detection (CD) on ASCII (PR #155)
|
519 |
+
|
520 |
+
## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
|
521 |
+
|
522 |
+
### Changed
|
523 |
+
- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
|
524 |
+
|
525 |
+
### Fixed
|
526 |
+
- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
|
527 |
+
|
528 |
+
## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
|
529 |
+
### Changed
|
530 |
+
- Improvement over Vietnamese detection (PR #126)
|
531 |
+
- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)
|
532 |
+
- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
|
533 |
+
- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)
|
534 |
+
- Code style as refactored by Sourcery-AI (PR #131)
|
535 |
+
- Minor adjustment on the MD around european words (PR #133)
|
536 |
+
- Remove and replace SRTs from assets / tests (PR #139)
|
537 |
+
- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
|
538 |
+
- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
|
539 |
+
|
540 |
+
### Fixed
|
541 |
+
- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
|
542 |
+
- Avoid using too insignificant chunk (PR #137)
|
543 |
+
|
544 |
+
### Added
|
545 |
+
- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
|
546 |
+
- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
|
547 |
+
|
548 |
+
## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
|
549 |
+
### Added
|
550 |
+
- Add support for Kazakh (Cyrillic) language detection (PR #109)
|
551 |
+
|
552 |
+
### Changed
|
553 |
+
- Further, improve inferring the language from a given single-byte code page (PR #112)
|
554 |
+
- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
|
555 |
+
- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
|
556 |
+
- Various detection improvement (MD+CD) (PR #117)
|
557 |
+
|
558 |
+
### Removed
|
559 |
+
- Remove redundant logging entry about detected language(s) (PR #115)
|
560 |
+
|
561 |
+
### Fixed
|
562 |
+
- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
|
563 |
+
|
564 |
+
## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
|
565 |
+
### Fixed
|
566 |
+
- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)
|
567 |
+
- Fix CLI crash when using --minimal output in certain cases (PR #103)
|
568 |
+
|
569 |
+
### Changed
|
570 |
+
- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
|
571 |
+
|
572 |
+
## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
|
573 |
+
### Changed
|
574 |
+
- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)
|
575 |
+
- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)
|
576 |
+
- The Unicode detection is slightly improved (PR #93)
|
577 |
+
- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91)
|
578 |
+
|
579 |
+
### Removed
|
580 |
+
- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)
|
581 |
+
|
582 |
+
### Fixed
|
583 |
+
- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)
|
584 |
+
- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
|
585 |
+
- The MANIFEST.in was not exhaustive (PR #78)
|
586 |
+
|
587 |
+
## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
|
588 |
+
### Fixed
|
589 |
+
- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)
|
590 |
+
- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
|
591 |
+
- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
|
592 |
+
- Submatch factoring could be wrong in rare edge cases (PR #72)
|
593 |
+
- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)
|
594 |
+
- Fix line endings from CRLF to LF for certain project files (PR #67)
|
595 |
+
|
596 |
+
### Changed
|
597 |
+
- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
|
598 |
+
- Allow fallback on specified encoding if any (PR #71)
|
599 |
+
|
600 |
+
## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
|
601 |
+
### Changed
|
602 |
+
- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)
|
603 |
+
- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
|
604 |
+
|
605 |
+
## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
|
606 |
+
### Fixed
|
607 |
+
- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
|
608 |
+
|
609 |
+
### Changed
|
610 |
+
- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
|
611 |
+
|
612 |
+
## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
|
613 |
+
### Fixed
|
614 |
+
- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
|
615 |
+
- Using explain=False permanently disable the verbose output in the current runtime (PR #47)
|
616 |
+
- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)
|
617 |
+
- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
|
618 |
+
|
619 |
+
### Changed
|
620 |
+
- Public function normalize default args values were not aligned with from_bytes (PR #53)
|
621 |
+
|
622 |
+
### Added
|
623 |
+
- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
|
624 |
+
|
625 |
+
## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
|
626 |
+
### Changed
|
627 |
+
- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
|
628 |
+
- Accent has been made on UTF-8 detection, should perform rather instantaneous.
|
629 |
+
- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
|
630 |
+
- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)
|
631 |
+
- The program has been rewritten to ease the readability and maintainability. (+ Using static typing)
|
632 |
+
- utf_7 detection has been reinstated.
|
633 |
+
|
634 |
+
### Removed
|
635 |
+
- This package no longer require anything when used with Python 3.5 (Dropped cached_property)
|
636 |
+
- Removed support for these languages: Catalan, Esperanto, Kazakh, Basque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.
|
637 |
+
- The exception hook on UnicodeDecodeError has been removed.
|
638 |
+
|
639 |
+
### Deprecated
|
640 |
+
- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
|
641 |
+
|
642 |
+
### Fixed
|
643 |
+
- The CLI output used the relative path of the file(s). Should be absolute.
|
644 |
+
|
645 |
+
## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
|
646 |
+
### Fixed
|
647 |
+
- Logger configuration/usage no longer conflict with others (PR #44)
|
648 |
+
|
649 |
+
## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
|
650 |
+
### Removed
|
651 |
+
- Using standard logging instead of using the package loguru.
|
652 |
+
- Dropping nose test framework in favor of the maintained pytest.
|
653 |
+
- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.
|
654 |
+
- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.
|
655 |
+
- Stop support for UTF-7 that does not contain a SIG.
|
656 |
+
- Dropping PrettyTable, replaced with pure JSON output in CLI.
|
657 |
+
|
658 |
+
### Fixed
|
659 |
+
- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.
|
660 |
+
- Not searching properly for the BOM when trying utf32/16 parent codec.
|
661 |
+
|
662 |
+
### Changed
|
663 |
+
- Improving the package final size by compressing frequencies.json.
|
664 |
+
- Huge improvement over the largest payloads.
|
665 |
+
|
666 |
+
### Added
|
667 |
+
- CLI now produces JSON consumable output.
|
668 |
+
- Return ASCII if given sequences fit. Given reasonable confidence.
|
669 |
+
|
670 |
+
## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
|
671 |
+
|
672 |
+
### Fixed
|
673 |
+
- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
|
674 |
+
|
675 |
+
## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
|
676 |
+
|
677 |
+
### Fixed
|
678 |
+
- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)
|
679 |
+
|
680 |
+
## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
|
681 |
+
|
682 |
+
### Fixed
|
683 |
+
- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)
|
684 |
+
|
685 |
+
## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
|
686 |
+
|
687 |
+
### Changed
|
688 |
+
- Amend the previous release to allow prettytable 2.0 (PR #35)
|
689 |
+
|
690 |
+
## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
|
691 |
+
|
692 |
+
### Fixed
|
693 |
+
- Fix error while using the package with a python pre-release interpreter (PR #33)
|
694 |
+
|
695 |
+
### Changed
|
696 |
+
- Dependencies refactoring, constraints revised.
|
697 |
+
|
698 |
+
### Added
|
699 |
+
- Add python 3.9 and 3.10 to the supported interpreters
|
700 |
+
|
701 |
+
MIT License
|
702 |
+
|
703 |
+
Copyright (c) 2025 TAHRI Ahmed R.
|
704 |
+
|
705 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
706 |
+
of this software and associated documentation files (the "Software"), to deal
|
707 |
+
in the Software without restriction, including without limitation the rights
|
708 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
709 |
+
copies of the Software, and to permit persons to whom the Software is
|
710 |
+
furnished to do so, subject to the following conditions:
|
711 |
+
|
712 |
+
The above copyright notice and this permission notice shall be included in all
|
713 |
+
copies or substantial portions of the Software.
|
714 |
+
|
715 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
716 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
717 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
718 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
719 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
720 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
721 |
+
SOFTWARE.
|
meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/RECORD
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
../../../bin/normalizer,sha256=F9EtnPSlTHzVRE5yS9KScrHy7-a9LjFl1o0X3x8yS1U,254
|
2 |
+
charset_normalizer-3.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
3 |
+
charset_normalizer-3.4.1.dist-info/LICENSE,sha256=bQ1Bv-FwrGx9wkjJpj4lTQ-0WmDVCoJX0K-SxuJJuIc,1071
|
4 |
+
charset_normalizer-3.4.1.dist-info/METADATA,sha256=JbyHzhmqZh_ugEn1Y7TY7CDYZA9FoU6BP25hrCNDf50,35313
|
5 |
+
charset_normalizer-3.4.1.dist-info/RECORD,,
|
6 |
+
charset_normalizer-3.4.1.dist-info/WHEEL,sha256=8V5JjwATQfL0d9dd03DBSgMiHY8GVsHwXVusby9L_SY,115
|
7 |
+
charset_normalizer-3.4.1.dist-info/entry_points.txt,sha256=8C-Y3iXIfyXQ83Tpir2B8t-XLJYpxF5xbb38d_js-h4,65
|
8 |
+
charset_normalizer-3.4.1.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
|
9 |
+
charset_normalizer/__init__.py,sha256=OKRxRv2Zhnqk00tqkN0c1BtJjm165fWXLydE52IKuHc,1590
|
10 |
+
charset_normalizer/__main__.py,sha256=yzYxMR-IhKRHYwcSlavEv8oGdwxsR89mr2X09qXGdps,109
|
11 |
+
charset_normalizer/__pycache__/__init__.cpython-313.pyc,,
|
12 |
+
charset_normalizer/__pycache__/__main__.cpython-313.pyc,,
|
13 |
+
charset_normalizer/__pycache__/api.cpython-313.pyc,,
|
14 |
+
charset_normalizer/__pycache__/cd.cpython-313.pyc,,
|
15 |
+
charset_normalizer/__pycache__/constant.cpython-313.pyc,,
|
16 |
+
charset_normalizer/__pycache__/legacy.cpython-313.pyc,,
|
17 |
+
charset_normalizer/__pycache__/md.cpython-313.pyc,,
|
18 |
+
charset_normalizer/__pycache__/models.cpython-313.pyc,,
|
19 |
+
charset_normalizer/__pycache__/utils.cpython-313.pyc,,
|
20 |
+
charset_normalizer/__pycache__/version.cpython-313.pyc,,
|
21 |
+
charset_normalizer/api.py,sha256=qBRz8mJ_R5E713R6TOyqHEdnmyxbEDnCSHvx32ubDGg,22617
|
22 |
+
charset_normalizer/cd.py,sha256=WKTo1HDb-H9HfCDc3Bfwq5jzS25Ziy9SE2a74SgTq88,12522
|
23 |
+
charset_normalizer/cli/__init__.py,sha256=D8I86lFk2-py45JvqxniTirSj_sFyE6sjaY_0-G1shc,136
|
24 |
+
charset_normalizer/cli/__main__.py,sha256=VGC9klOoi6_R2z8rmyrc936kv7u2A1udjjHtlmNPDTM,10410
|
25 |
+
charset_normalizer/cli/__pycache__/__init__.cpython-313.pyc,,
|
26 |
+
charset_normalizer/cli/__pycache__/__main__.cpython-313.pyc,,
|
27 |
+
charset_normalizer/constant.py,sha256=4VuTcZNLew1j_8ixA-Rt_VVqNWD4pwgHOHMCMlr0964,40477
|
28 |
+
charset_normalizer/legacy.py,sha256=yhNXsPHkBfqPXKRb-sPXNj3Bscp9-mFGcYOkJ62tg9c,2328
|
29 |
+
charset_normalizer/md.cpython-313-darwin.so,sha256=mUvSZMr6ty3_yLX68YZ5BJQr91RHfsfhiJCWQCFBMnE,115664
|
30 |
+
charset_normalizer/md.py,sha256=iyXXQGWl54nnLQLueMWTmUtlivO0-rTBgVkmJxIIAGU,20036
|
31 |
+
charset_normalizer/md__mypyc.cpython-313-darwin.so,sha256=yf7dzVSgLFK-NZBo4MXVDlEpN4VoXWXFuLSbUTd0XFw,482184
|
32 |
+
charset_normalizer/models.py,sha256=lKXhOnIPtiakbK3i__J9wpOfzx3JDTKj7Dn3Rg0VaRI,12394
|
33 |
+
charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
34 |
+
charset_normalizer/utils.py,sha256=T5UHo8AS7NVMmgruWoZyqEf0WrZVcQpgUNetRoborSk,12002
|
35 |
+
charset_normalizer/version.py,sha256=Ambcj3O8FfvdLfDLc8dkaxZx97O1IM_R4_aKGD_TDdE,115
|
meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Wheel-Version: 1.0
|
2 |
+
Generator: setuptools (75.6.0)
|
3 |
+
Root-Is-Purelib: false
|
4 |
+
Tag: cp313-cp313-macosx_10_13_universal2
|
5 |
+
|
meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
[console_scripts]
|
2 |
+
normalizer = charset_normalizer:cli.cli_detect
|
meow/lib/python3.13/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
charset_normalizer
|
meow/lib/python3.13/site-packages/charset_normalizer/__main__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from __future__ import annotations

from .cli import cli_detect

# Module runner: lets the package CLI be invoked as
# `python -m charset_normalizer` (delegates to the same entry point
# exposed by the `normalizer` console script).
if __name__ == "__main__":
    cli_detect()
|
meow/lib/python3.13/site-packages/charset_normalizer/cd.py
ADDED
@@ -0,0 +1,395 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import importlib
|
4 |
+
from codecs import IncrementalDecoder
|
5 |
+
from collections import Counter
|
6 |
+
from functools import lru_cache
|
7 |
+
from typing import Counter as TypeCounter
|
8 |
+
|
9 |
+
from .constant import (
|
10 |
+
FREQUENCIES,
|
11 |
+
KO_NAMES,
|
12 |
+
LANGUAGE_SUPPORTED_COUNT,
|
13 |
+
TOO_SMALL_SEQUENCE,
|
14 |
+
ZH_NAMES,
|
15 |
+
)
|
16 |
+
from .md import is_suspiciously_successive_range
|
17 |
+
from .models import CoherenceMatches
|
18 |
+
from .utils import (
|
19 |
+
is_accentuated,
|
20 |
+
is_latin,
|
21 |
+
is_multi_byte_encoding,
|
22 |
+
is_unicode_range_secondary,
|
23 |
+
unicode_range,
|
24 |
+
)
|
25 |
+
|
26 |
+
|
27 |
+
def encoding_unicode_range(iana_name: str) -> list[str]:
    """
    Return associated unicode ranges in a single byte code page.

    Decodes every byte in 0x40-0xFE with the named codec and keeps the
    non-secondary unicode ranges that cover at least 15% of the decodable
    characters.

    :raises OSError: if *iana_name* designates a multi-byte encoding.
    """
    if is_multi_byte_encoding(iana_name):
        raise OSError("Function not supported on multi-byte code page")

    decoder_cls = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder

    decoder: IncrementalDecoder = decoder_cls(errors="ignore")
    range_occurrences: dict[str, int] = {}
    total_characters: int = 0

    for byte_value in range(0x40, 0xFF):
        decoded: str = decoder.decode(bytes([byte_value]))

        if not decoded:
            continue

        detected_range: str | None = unicode_range(decoded)

        if detected_range is None:
            continue

        # Secondary ranges are skipped for the result, but the character
        # still counts toward the total used in the 15% threshold below.
        if is_unicode_range_secondary(detected_range) is False:
            range_occurrences[detected_range] = (
                range_occurrences.get(detected_range, 0) + 1
            )
        total_characters += 1

    return sorted(
        unicode_range_name
        for unicode_range_name, occurrences in range_occurrences.items()
        if occurrences / total_characters >= 0.15
    )
|
62 |
+
|
63 |
+
|
64 |
+
def unicode_range_languages(primary_range: str) -> list[str]:
    """
    Return inferred languages used with a unicode range.

    A language qualifies as soon as one of its frequent characters falls
    inside *primary_range*.
    """
    return [
        language
        for language, characters in FREQUENCIES.items()
        if any(unicode_range(character) == primary_range for character in characters)
    ]
|
77 |
+
|
78 |
+
|
79 |
+
@lru_cache()
def encoding_languages(iana_name: str) -> list[str]:
    """
    Single-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    # The first non-Latin range drives the language inference; a page with
    # only Latin ranges gets the generic "Latin Based" answer.
    primary_range: str | None = next(
        (
            discovered_range
            for discovered_range in encoding_unicode_range(iana_name)
            if "Latin" not in discovered_range
        ),
        None,
    )

    if primary_range is None:
        return ["Latin Based"]

    return unicode_range_languages(primary_range)
|
97 |
+
|
98 |
+
|
99 |
+
@lru_cache()
|
100 |
+
def mb_encoding_languages(iana_name: str) -> list[str]:
|
101 |
+
"""
|
102 |
+
Multi-byte encoding language association. Some code page are heavily linked to particular language(s).
|
103 |
+
This function does the correspondence.
|
104 |
+
"""
|
105 |
+
if (
|
106 |
+
iana_name.startswith("shift_")
|
107 |
+
or iana_name.startswith("iso2022_jp")
|
108 |
+
or iana_name.startswith("euc_j")
|
109 |
+
or iana_name == "cp932"
|
110 |
+
):
|
111 |
+
return ["Japanese"]
|
112 |
+
if iana_name.startswith("gb") or iana_name in ZH_NAMES:
|
113 |
+
return ["Chinese"]
|
114 |
+
if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
|
115 |
+
return ["Korean"]
|
116 |
+
|
117 |
+
return []
|
118 |
+
|
119 |
+
|
120 |
+
@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
def get_target_features(language: str) -> tuple[bool, bool]:
    """
    Determine main aspects from a supported language if it contains accents and if is pure Latin.
    """
    have_accents: bool = False
    pure_latin: bool = True

    for frequent_character in FREQUENCIES[language]:
        if pure_latin and is_latin(frequent_character) is False:
            pure_latin = False
        if not have_accents and is_accentuated(frequent_character):
            have_accents = True

    return have_accents, pure_latin
|
135 |
+
|
136 |
+
|
137 |
+
def alphabet_languages(
    characters: list[str], ignore_non_latin: bool = False
) -> list[str]:
    """
    Return associated languages associated to given characters.

    Languages are returned best-match first; a language is retained when at
    least 20% of its frequent characters appear in *characters*.

    :param characters: Characters observed in a payload.
    :param ignore_non_latin: Skip languages that are not purely Latin based.
    """
    languages: list[tuple[str, float]] = []

    # Membership is tested once per frequent character of every language;
    # a set makes each test O(1) instead of O(len(characters)).
    character_set = set(characters)

    source_have_accents = any(is_accentuated(character) for character in characters)

    for language, language_characters in FREQUENCIES.items():
        target_have_accents, target_pure_latin = get_target_features(language)

        if ignore_non_latin and target_pure_latin is False:
            continue

        # A language without accents cannot explain an accented payload.
        if target_have_accents is False and source_have_accents:
            continue

        character_count: int = len(language_characters)

        character_match_count: int = len(
            [c for c in language_characters if c in character_set]
        )

        ratio: float = character_match_count / character_count

        if ratio >= 0.2:
            languages.append((language, ratio))

    languages = sorted(languages, key=lambda x: x[1], reverse=True)

    return [compatible_language[0] for compatible_language in languages]
|
170 |
+
|
171 |
+
|
172 |
+
def characters_popularity_compare(
    language: str, ordered_characters: list[str]
) -> float:
    """
    Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language.
    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
    Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.)

    :raises ValueError: if *language* is not a supported FREQUENCIES key.
    """
    if language not in FREQUENCIES:
        raise ValueError(f"{language} not available")

    character_approved_count: int = 0
    FREQUENCIES_language_set = set(FREQUENCIES[language])

    ordered_characters_count: int = len(ordered_characters)
    target_language_characters_count: int = len(FREQUENCIES[language])

    large_alphabet: bool = target_language_characters_count > 26

    # Loop-invariant; previously recomputed on every iteration.
    expected_projection_ratio: float = (
        target_language_characters_count / ordered_characters_count
    )

    for character_rank, character in enumerate(ordered_characters):
        if character not in FREQUENCIES_language_set:
            continue

        character_rank_in_language: int = FREQUENCIES[language].index(character)
        # Project the observed rank onto the language's alphabet size so
        # both ranks live on a comparable scale.
        character_rank_projection: int = int(character_rank * expected_projection_ratio)

        # Small alphabet: a rank drift above 4 disqualifies the character.
        if (
            large_alphabet is False
            and abs(character_rank_projection - character_rank_in_language) > 4
        ):
            continue

        # Large alphabet: a drift under a third of the alphabet is a match.
        if (
            large_alphabet is True
            and abs(character_rank_projection - character_rank_in_language)
            < target_language_characters_count / 3
        ):
            character_approved_count += 1
            continue

        # Otherwise compare the neighborhoods (characters ranked before and
        # after) between the observation and the language reference.
        characters_before_source: list[str] = FREQUENCIES[language][
            0:character_rank_in_language
        ]
        characters_after_source: list[str] = FREQUENCIES[language][
            character_rank_in_language:
        ]
        characters_before: list[str] = ordered_characters[0:character_rank]
        characters_after: list[str] = ordered_characters[character_rank:]

        before_match_count: int = len(
            set(characters_before) & set(characters_before_source)
        )

        after_match_count: int = len(
            set(characters_after) & set(characters_after_source)
        )

        if len(characters_before_source) == 0 and before_match_count <= 4:
            character_approved_count += 1
            continue

        if len(characters_after_source) == 0 and after_match_count <= 4:
            character_approved_count += 1
            continue

        if (
            before_match_count / len(characters_before_source) >= 0.4
            or after_match_count / len(characters_after_source) >= 0.4
        ):
            character_approved_count += 1
            continue

    return character_approved_count / len(ordered_characters)
|
250 |
+
|
251 |
+
|
252 |
+
def alpha_unicode_split(decoded_sequence: str) -> list[str]:
    """
    Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
    Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list;
    One containing the latin letters and the other hebrew.
    """
    layers: dict[str, str] = {}

    for character in decoded_sequence:
        if not character.isalpha():
            continue

        character_range: str | None = unicode_range(character)

        if character_range is None:
            continue

        # Reuse the first already-known layer whose range is a plausible
        # companion of this character's range; otherwise open a new layer.
        target_range: str | None = next(
            (
                known_range
                for known_range in layers
                if is_suspiciously_successive_range(known_range, character_range)
                is False
            ),
            None,
        )

        if target_range is None:
            target_range = character_range

        layers[target_range] = layers.get(target_range, "") + character.lower()

    return list(layers.values())
|
289 |
+
|
290 |
+
|
291 |
+
def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches:
    """
    This function merge results previously given by the function coherence_ratio.
    The return type is the same as coherence_ratio.

    Ratios reported for the same language across results are averaged
    (rounded to 4 decimals) and the merged list is sorted best-match first.
    """
    per_language_ratios: dict[str, list[float]] = {}

    for result in results:
        for language, ratio in result:
            per_language_ratios.setdefault(language, []).append(ratio)

    merged = [
        (language, round(sum(ratios) / len(ratios), 4))
        for language, ratios in per_language_ratios.items()
    ]

    return sorted(merged, key=lambda entry: entry[1], reverse=True)
|
317 |
+
|
318 |
+
|
319 |
+
def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches:
    """
    We shall NOT return "English—" in CoherenceMatches because it is an alternative
    of "English". This function only keeps the best match and remove the em-dash in it.
    """
    ratios_by_base_name: dict[str, list[float]] = {}

    for language, ratio in results:
        base_name: str = language.replace("—", "")
        ratios_by_base_name.setdefault(base_name, []).append(ratio)

    # No language collapsed with an alternative spelling: nothing to filter.
    if all(len(ratios) == 1 for ratios in ratios_by_base_name.values()):
        return results

    return [
        (base_name, max(ratios))
        for base_name, ratios in ratios_by_base_name.items()
    ]
|
344 |
+
|
345 |
+
|
346 |
+
@lru_cache(maxsize=2048)
def coherence_ratio(
    decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None
) -> CoherenceMatches:
    """
    Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers.
    A layer = Character extraction by alphabets/ranges.
    """

    matches: list[tuple[str, float]] = []
    sufficient_match_count: int = 0

    included_languages = lg_inclusion.split(",") if lg_inclusion is not None else []
    ignore_non_latin: bool = "Latin Based" in included_languages
    if ignore_non_latin:
        included_languages.remove("Latin Based")

    for layer in alpha_unicode_split(decoded_sequence):
        sequence_frequencies: TypeCounter[str] = Counter(layer)
        most_common = sequence_frequencies.most_common()

        character_count: int = sum(occurrence for _, occurrence in most_common)

        # Layers with too few characters are statistically meaningless.
        if character_count <= TOO_SMALL_SEQUENCE:
            continue

        popular_character_ordered: list[str] = [
            character for character, _ in most_common
        ]

        # An explicit inclusion list short-circuits alphabet inference.
        candidate_languages = included_languages or alphabet_languages(
            popular_character_ordered, ignore_non_latin
        )

        for language in candidate_languages:
            ratio: float = characters_popularity_compare(
                language, popular_character_ordered
            )

            if ratio < threshold:
                continue
            if ratio >= 0.8:
                sufficient_match_count += 1

            matches.append((language, round(ratio, 4)))

            # Three strong matches on this layer: stop scanning languages.
            if sufficient_match_count >= 3:
                break

    return sorted(
        filter_alt_coherence_matches(matches), key=lambda match: match[1], reverse=True
    )
|
meow/lib/python3.13/site-packages/charset_normalizer/constant.py
ADDED
@@ -0,0 +1,1998 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE
|
4 |
+
from encodings.aliases import aliases
|
5 |
+
from re import IGNORECASE
|
6 |
+
from re import compile as re_compile
|
7 |
+
|
8 |
+
# Contain for each eligible encoding a list of/item bytes SIG/BOM
|
9 |
+
ENCODING_MARKS: dict[str, bytes | list[bytes]] = {
|
10 |
+
"utf_8": BOM_UTF8,
|
11 |
+
"utf_7": [
|
12 |
+
b"\x2b\x2f\x76\x38",
|
13 |
+
b"\x2b\x2f\x76\x39",
|
14 |
+
b"\x2b\x2f\x76\x2b",
|
15 |
+
b"\x2b\x2f\x76\x2f",
|
16 |
+
b"\x2b\x2f\x76\x38\x2d",
|
17 |
+
],
|
18 |
+
"gb18030": b"\x84\x31\x95\x33",
|
19 |
+
"utf_32": [BOM_UTF32_BE, BOM_UTF32_LE],
|
20 |
+
"utf_16": [BOM_UTF16_BE, BOM_UTF16_LE],
|
21 |
+
}
|
22 |
+
|
23 |
+
TOO_SMALL_SEQUENCE: int = 32
|
24 |
+
TOO_BIG_SEQUENCE: int = int(10e6)
|
25 |
+
|
26 |
+
UTF8_MAXIMAL_ALLOCATION: int = 1_112_064
|
27 |
+
|
28 |
+
# Up-to-date Unicode ucd/15.0.0
|
29 |
+
UNICODE_RANGES_COMBINED: dict[str, range] = {
|
30 |
+
"Control character": range(32),
|
31 |
+
"Basic Latin": range(32, 128),
|
32 |
+
"Latin-1 Supplement": range(128, 256),
|
33 |
+
"Latin Extended-A": range(256, 384),
|
34 |
+
"Latin Extended-B": range(384, 592),
|
35 |
+
"IPA Extensions": range(592, 688),
|
36 |
+
"Spacing Modifier Letters": range(688, 768),
|
37 |
+
"Combining Diacritical Marks": range(768, 880),
|
38 |
+
"Greek and Coptic": range(880, 1024),
|
39 |
+
"Cyrillic": range(1024, 1280),
|
40 |
+
"Cyrillic Supplement": range(1280, 1328),
|
41 |
+
"Armenian": range(1328, 1424),
|
42 |
+
"Hebrew": range(1424, 1536),
|
43 |
+
"Arabic": range(1536, 1792),
|
44 |
+
"Syriac": range(1792, 1872),
|
45 |
+
"Arabic Supplement": range(1872, 1920),
|
46 |
+
"Thaana": range(1920, 1984),
|
47 |
+
"NKo": range(1984, 2048),
|
48 |
+
"Samaritan": range(2048, 2112),
|
49 |
+
"Mandaic": range(2112, 2144),
|
50 |
+
"Syriac Supplement": range(2144, 2160),
|
51 |
+
"Arabic Extended-B": range(2160, 2208),
|
52 |
+
"Arabic Extended-A": range(2208, 2304),
|
53 |
+
"Devanagari": range(2304, 2432),
|
54 |
+
"Bengali": range(2432, 2560),
|
55 |
+
"Gurmukhi": range(2560, 2688),
|
56 |
+
"Gujarati": range(2688, 2816),
|
57 |
+
"Oriya": range(2816, 2944),
|
58 |
+
"Tamil": range(2944, 3072),
|
59 |
+
"Telugu": range(3072, 3200),
|
60 |
+
"Kannada": range(3200, 3328),
|
61 |
+
"Malayalam": range(3328, 3456),
|
62 |
+
"Sinhala": range(3456, 3584),
|
63 |
+
"Thai": range(3584, 3712),
|
64 |
+
"Lao": range(3712, 3840),
|
65 |
+
"Tibetan": range(3840, 4096),
|
66 |
+
"Myanmar": range(4096, 4256),
|
67 |
+
"Georgian": range(4256, 4352),
|
68 |
+
"Hangul Jamo": range(4352, 4608),
|
69 |
+
"Ethiopic": range(4608, 4992),
|
70 |
+
"Ethiopic Supplement": range(4992, 5024),
|
71 |
+
"Cherokee": range(5024, 5120),
|
72 |
+
"Unified Canadian Aboriginal Syllabics": range(5120, 5760),
|
73 |
+
"Ogham": range(5760, 5792),
|
74 |
+
"Runic": range(5792, 5888),
|
75 |
+
"Tagalog": range(5888, 5920),
|
76 |
+
"Hanunoo": range(5920, 5952),
|
77 |
+
"Buhid": range(5952, 5984),
|
78 |
+
"Tagbanwa": range(5984, 6016),
|
79 |
+
"Khmer": range(6016, 6144),
|
80 |
+
"Mongolian": range(6144, 6320),
|
81 |
+
"Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400),
|
82 |
+
"Limbu": range(6400, 6480),
|
83 |
+
"Tai Le": range(6480, 6528),
|
84 |
+
"New Tai Lue": range(6528, 6624),
|
85 |
+
"Khmer Symbols": range(6624, 6656),
|
86 |
+
"Buginese": range(6656, 6688),
|
87 |
+
"Tai Tham": range(6688, 6832),
|
88 |
+
"Combining Diacritical Marks Extended": range(6832, 6912),
|
89 |
+
"Balinese": range(6912, 7040),
|
90 |
+
"Sundanese": range(7040, 7104),
|
91 |
+
"Batak": range(7104, 7168),
|
92 |
+
"Lepcha": range(7168, 7248),
|
93 |
+
"Ol Chiki": range(7248, 7296),
|
94 |
+
"Cyrillic Extended-C": range(7296, 7312),
|
95 |
+
"Georgian Extended": range(7312, 7360),
|
96 |
+
"Sundanese Supplement": range(7360, 7376),
|
97 |
+
"Vedic Extensions": range(7376, 7424),
|
98 |
+
"Phonetic Extensions": range(7424, 7552),
|
99 |
+
"Phonetic Extensions Supplement": range(7552, 7616),
|
100 |
+
"Combining Diacritical Marks Supplement": range(7616, 7680),
|
101 |
+
"Latin Extended Additional": range(7680, 7936),
|
102 |
+
"Greek Extended": range(7936, 8192),
|
103 |
+
"General Punctuation": range(8192, 8304),
|
104 |
+
"Superscripts and Subscripts": range(8304, 8352),
|
105 |
+
"Currency Symbols": range(8352, 8400),
|
106 |
+
"Combining Diacritical Marks for Symbols": range(8400, 8448),
|
107 |
+
"Letterlike Symbols": range(8448, 8528),
|
108 |
+
"Number Forms": range(8528, 8592),
|
109 |
+
"Arrows": range(8592, 8704),
|
110 |
+
"Mathematical Operators": range(8704, 8960),
|
111 |
+
"Miscellaneous Technical": range(8960, 9216),
|
112 |
+
"Control Pictures": range(9216, 9280),
|
113 |
+
"Optical Character Recognition": range(9280, 9312),
|
114 |
+
"Enclosed Alphanumerics": range(9312, 9472),
|
115 |
+
"Box Drawing": range(9472, 9600),
|
116 |
+
"Block Elements": range(9600, 9632),
|
117 |
+
"Geometric Shapes": range(9632, 9728),
|
118 |
+
"Miscellaneous Symbols": range(9728, 9984),
|
119 |
+
"Dingbats": range(9984, 10176),
|
120 |
+
"Miscellaneous Mathematical Symbols-A": range(10176, 10224),
|
121 |
+
"Supplemental Arrows-A": range(10224, 10240),
|
122 |
+
"Braille Patterns": range(10240, 10496),
|
123 |
+
"Supplemental Arrows-B": range(10496, 10624),
|
124 |
+
"Miscellaneous Mathematical Symbols-B": range(10624, 10752),
|
125 |
+
"Supplemental Mathematical Operators": range(10752, 11008),
|
126 |
+
"Miscellaneous Symbols and Arrows": range(11008, 11264),
|
127 |
+
"Glagolitic": range(11264, 11360),
|
128 |
+
"Latin Extended-C": range(11360, 11392),
|
129 |
+
"Coptic": range(11392, 11520),
|
130 |
+
"Georgian Supplement": range(11520, 11568),
|
131 |
+
"Tifinagh": range(11568, 11648),
|
132 |
+
"Ethiopic Extended": range(11648, 11744),
|
133 |
+
"Cyrillic Extended-A": range(11744, 11776),
|
134 |
+
"Supplemental Punctuation": range(11776, 11904),
|
135 |
+
"CJK Radicals Supplement": range(11904, 12032),
|
136 |
+
"Kangxi Radicals": range(12032, 12256),
|
137 |
+
"Ideographic Description Characters": range(12272, 12288),
|
138 |
+
"CJK Symbols and Punctuation": range(12288, 12352),
|
139 |
+
"Hiragana": range(12352, 12448),
|
140 |
+
"Katakana": range(12448, 12544),
|
141 |
+
"Bopomofo": range(12544, 12592),
|
142 |
+
"Hangul Compatibility Jamo": range(12592, 12688),
|
143 |
+
"Kanbun": range(12688, 12704),
|
144 |
+
"Bopomofo Extended": range(12704, 12736),
|
145 |
+
"CJK Strokes": range(12736, 12784),
|
146 |
+
"Katakana Phonetic Extensions": range(12784, 12800),
|
147 |
+
"Enclosed CJK Letters and Months": range(12800, 13056),
|
148 |
+
"CJK Compatibility": range(13056, 13312),
|
149 |
+
"CJK Unified Ideographs Extension A": range(13312, 19904),
|
150 |
+
"Yijing Hexagram Symbols": range(19904, 19968),
|
151 |
+
"CJK Unified Ideographs": range(19968, 40960),
|
152 |
+
"Yi Syllables": range(40960, 42128),
|
153 |
+
"Yi Radicals": range(42128, 42192),
|
154 |
+
"Lisu": range(42192, 42240),
|
155 |
+
"Vai": range(42240, 42560),
|
156 |
+
"Cyrillic Extended-B": range(42560, 42656),
|
157 |
+
"Bamum": range(42656, 42752),
|
158 |
+
"Modifier Tone Letters": range(42752, 42784),
|
159 |
+
"Latin Extended-D": range(42784, 43008),
|
160 |
+
"Syloti Nagri": range(43008, 43056),
|
161 |
+
"Common Indic Number Forms": range(43056, 43072),
|
162 |
+
"Phags-pa": range(43072, 43136),
|
163 |
+
"Saurashtra": range(43136, 43232),
|
164 |
+
"Devanagari Extended": range(43232, 43264),
|
165 |
+
"Kayah Li": range(43264, 43312),
|
166 |
+
"Rejang": range(43312, 43360),
|
167 |
+
"Hangul Jamo Extended-A": range(43360, 43392),
|
168 |
+
"Javanese": range(43392, 43488),
|
169 |
+
"Myanmar Extended-B": range(43488, 43520),
|
170 |
+
"Cham": range(43520, 43616),
|
171 |
+
"Myanmar Extended-A": range(43616, 43648),
|
172 |
+
"Tai Viet": range(43648, 43744),
|
173 |
+
"Meetei Mayek Extensions": range(43744, 43776),
|
174 |
+
"Ethiopic Extended-A": range(43776, 43824),
|
175 |
+
"Latin Extended-E": range(43824, 43888),
|
176 |
+
"Cherokee Supplement": range(43888, 43968),
|
177 |
+
"Meetei Mayek": range(43968, 44032),
|
178 |
+
"Hangul Syllables": range(44032, 55216),
|
179 |
+
"Hangul Jamo Extended-B": range(55216, 55296),
|
180 |
+
"High Surrogates": range(55296, 56192),
|
181 |
+
"High Private Use Surrogates": range(56192, 56320),
|
182 |
+
"Low Surrogates": range(56320, 57344),
|
183 |
+
"Private Use Area": range(57344, 63744),
|
184 |
+
"CJK Compatibility Ideographs": range(63744, 64256),
|
185 |
+
"Alphabetic Presentation Forms": range(64256, 64336),
|
186 |
+
"Arabic Presentation Forms-A": range(64336, 65024),
|
187 |
+
"Variation Selectors": range(65024, 65040),
|
188 |
+
"Vertical Forms": range(65040, 65056),
|
189 |
+
"Combining Half Marks": range(65056, 65072),
|
190 |
+
"CJK Compatibility Forms": range(65072, 65104),
|
191 |
+
"Small Form Variants": range(65104, 65136),
|
192 |
+
"Arabic Presentation Forms-B": range(65136, 65280),
|
193 |
+
"Halfwidth and Fullwidth Forms": range(65280, 65520),
|
194 |
+
"Specials": range(65520, 65536),
|
195 |
+
"Linear B Syllabary": range(65536, 65664),
|
196 |
+
"Linear B Ideograms": range(65664, 65792),
|
197 |
+
"Aegean Numbers": range(65792, 65856),
|
198 |
+
"Ancient Greek Numbers": range(65856, 65936),
|
199 |
+
"Ancient Symbols": range(65936, 66000),
|
200 |
+
"Phaistos Disc": range(66000, 66048),
|
201 |
+
"Lycian": range(66176, 66208),
|
202 |
+
"Carian": range(66208, 66272),
|
203 |
+
"Coptic Epact Numbers": range(66272, 66304),
|
204 |
+
"Old Italic": range(66304, 66352),
|
205 |
+
"Gothic": range(66352, 66384),
|
206 |
+
"Old Permic": range(66384, 66432),
|
207 |
+
"Ugaritic": range(66432, 66464),
|
208 |
+
"Old Persian": range(66464, 66528),
|
209 |
+
"Deseret": range(66560, 66640),
|
210 |
+
"Shavian": range(66640, 66688),
|
211 |
+
"Osmanya": range(66688, 66736),
|
212 |
+
"Osage": range(66736, 66816),
|
213 |
+
"Elbasan": range(66816, 66864),
|
214 |
+
"Caucasian Albanian": range(66864, 66928),
|
215 |
+
"Vithkuqi": range(66928, 67008),
|
216 |
+
"Linear A": range(67072, 67456),
|
217 |
+
"Latin Extended-F": range(67456, 67520),
|
218 |
+
"Cypriot Syllabary": range(67584, 67648),
|
219 |
+
"Imperial Aramaic": range(67648, 67680),
|
220 |
+
"Palmyrene": range(67680, 67712),
|
221 |
+
"Nabataean": range(67712, 67760),
|
222 |
+
"Hatran": range(67808, 67840),
|
223 |
+
"Phoenician": range(67840, 67872),
|
224 |
+
"Lydian": range(67872, 67904),
|
225 |
+
"Meroitic Hieroglyphs": range(67968, 68000),
|
226 |
+
"Meroitic Cursive": range(68000, 68096),
|
227 |
+
"Kharoshthi": range(68096, 68192),
|
228 |
+
"Old South Arabian": range(68192, 68224),
|
229 |
+
"Old North Arabian": range(68224, 68256),
|
230 |
+
"Manichaean": range(68288, 68352),
|
231 |
+
"Avestan": range(68352, 68416),
|
232 |
+
"Inscriptional Parthian": range(68416, 68448),
|
233 |
+
"Inscriptional Pahlavi": range(68448, 68480),
|
234 |
+
"Psalter Pahlavi": range(68480, 68528),
|
235 |
+
"Old Turkic": range(68608, 68688),
|
236 |
+
"Old Hungarian": range(68736, 68864),
|
237 |
+
"Hanifi Rohingya": range(68864, 68928),
|
238 |
+
"Rumi Numeral Symbols": range(69216, 69248),
|
239 |
+
"Yezidi": range(69248, 69312),
|
240 |
+
"Arabic Extended-C": range(69312, 69376),
|
241 |
+
"Old Sogdian": range(69376, 69424),
|
242 |
+
"Sogdian": range(69424, 69488),
|
243 |
+
"Old Uyghur": range(69488, 69552),
|
244 |
+
"Chorasmian": range(69552, 69600),
|
245 |
+
"Elymaic": range(69600, 69632),
|
246 |
+
"Brahmi": range(69632, 69760),
|
247 |
+
"Kaithi": range(69760, 69840),
|
248 |
+
"Sora Sompeng": range(69840, 69888),
|
249 |
+
"Chakma": range(69888, 69968),
|
250 |
+
"Mahajani": range(69968, 70016),
|
251 |
+
"Sharada": range(70016, 70112),
|
252 |
+
"Sinhala Archaic Numbers": range(70112, 70144),
|
253 |
+
"Khojki": range(70144, 70224),
|
254 |
+
"Multani": range(70272, 70320),
|
255 |
+
"Khudawadi": range(70320, 70400),
|
256 |
+
"Grantha": range(70400, 70528),
|
257 |
+
"Newa": range(70656, 70784),
|
258 |
+
"Tirhuta": range(70784, 70880),
|
259 |
+
"Siddham": range(71040, 71168),
|
260 |
+
"Modi": range(71168, 71264),
|
261 |
+
"Mongolian Supplement": range(71264, 71296),
|
262 |
+
"Takri": range(71296, 71376),
|
263 |
+
"Ahom": range(71424, 71504),
|
264 |
+
"Dogra": range(71680, 71760),
|
265 |
+
"Warang Citi": range(71840, 71936),
|
266 |
+
"Dives Akuru": range(71936, 72032),
|
267 |
+
"Nandinagari": range(72096, 72192),
|
268 |
+
"Zanabazar Square": range(72192, 72272),
|
269 |
+
"Soyombo": range(72272, 72368),
|
270 |
+
"Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384),
|
271 |
+
"Pau Cin Hau": range(72384, 72448),
|
272 |
+
"Devanagari Extended-A": range(72448, 72544),
|
273 |
+
"Bhaiksuki": range(72704, 72816),
|
274 |
+
"Marchen": range(72816, 72896),
|
275 |
+
"Masaram Gondi": range(72960, 73056),
|
276 |
+
"Gunjala Gondi": range(73056, 73136),
|
277 |
+
"Makasar": range(73440, 73472),
|
278 |
+
"Kawi": range(73472, 73568),
|
279 |
+
"Lisu Supplement": range(73648, 73664),
|
280 |
+
"Tamil Supplement": range(73664, 73728),
|
281 |
+
"Cuneiform": range(73728, 74752),
|
282 |
+
"Cuneiform Numbers and Punctuation": range(74752, 74880),
|
283 |
+
"Early Dynastic Cuneiform": range(74880, 75088),
|
284 |
+
"Cypro-Minoan": range(77712, 77824),
|
285 |
+
"Egyptian Hieroglyphs": range(77824, 78896),
|
286 |
+
"Egyptian Hieroglyph Format Controls": range(78896, 78944),
|
287 |
+
"Anatolian Hieroglyphs": range(82944, 83584),
|
288 |
+
"Bamum Supplement": range(92160, 92736),
|
289 |
+
"Mro": range(92736, 92784),
|
290 |
+
"Tangsa": range(92784, 92880),
|
291 |
+
"Bassa Vah": range(92880, 92928),
|
292 |
+
"Pahawh Hmong": range(92928, 93072),
|
293 |
+
"Medefaidrin": range(93760, 93856),
|
294 |
+
"Miao": range(93952, 94112),
|
295 |
+
"Ideographic Symbols and Punctuation": range(94176, 94208),
|
296 |
+
"Tangut": range(94208, 100352),
|
297 |
+
"Tangut Components": range(100352, 101120),
|
298 |
+
"Khitan Small Script": range(101120, 101632),
|
299 |
+
"Tangut Supplement": range(101632, 101760),
|
300 |
+
"Kana Extended-B": range(110576, 110592),
|
301 |
+
"Kana Supplement": range(110592, 110848),
|
302 |
+
"Kana Extended-A": range(110848, 110896),
|
303 |
+
"Small Kana Extension": range(110896, 110960),
|
304 |
+
"Nushu": range(110960, 111360),
|
305 |
+
"Duployan": range(113664, 113824),
|
306 |
+
"Shorthand Format Controls": range(113824, 113840),
|
307 |
+
"Znamenny Musical Notation": range(118528, 118736),
|
308 |
+
"Byzantine Musical Symbols": range(118784, 119040),
|
309 |
+
"Musical Symbols": range(119040, 119296),
|
310 |
+
"Ancient Greek Musical Notation": range(119296, 119376),
|
311 |
+
"Kaktovik Numerals": range(119488, 119520),
|
312 |
+
"Mayan Numerals": range(119520, 119552),
|
313 |
+
"Tai Xuan Jing Symbols": range(119552, 119648),
|
314 |
+
"Counting Rod Numerals": range(119648, 119680),
|
315 |
+
"Mathematical Alphanumeric Symbols": range(119808, 120832),
|
316 |
+
"Sutton SignWriting": range(120832, 121520),
|
317 |
+
"Latin Extended-G": range(122624, 122880),
|
318 |
+
"Glagolitic Supplement": range(122880, 122928),
|
319 |
+
"Cyrillic Extended-D": range(122928, 123024),
|
320 |
+
"Nyiakeng Puachue Hmong": range(123136, 123216),
|
321 |
+
"Toto": range(123536, 123584),
|
322 |
+
"Wancho": range(123584, 123648),
|
323 |
+
"Nag Mundari": range(124112, 124160),
|
324 |
+
"Ethiopic Extended-B": range(124896, 124928),
|
325 |
+
"Mende Kikakui": range(124928, 125152),
|
326 |
+
"Adlam": range(125184, 125280),
|
327 |
+
"Indic Siyaq Numbers": range(126064, 126144),
|
328 |
+
"Ottoman Siyaq Numbers": range(126208, 126288),
|
329 |
+
"Arabic Mathematical Alphabetic Symbols": range(126464, 126720),
|
330 |
+
"Mahjong Tiles": range(126976, 127024),
|
331 |
+
"Domino Tiles": range(127024, 127136),
|
332 |
+
"Playing Cards": range(127136, 127232),
|
333 |
+
"Enclosed Alphanumeric Supplement": range(127232, 127488),
|
334 |
+
"Enclosed Ideographic Supplement": range(127488, 127744),
|
335 |
+
"Miscellaneous Symbols and Pictographs": range(127744, 128512),
|
336 |
+
"Emoticons range(Emoji)": range(128512, 128592),
|
337 |
+
"Ornamental Dingbats": range(128592, 128640),
|
338 |
+
"Transport and Map Symbols": range(128640, 128768),
|
339 |
+
"Alchemical Symbols": range(128768, 128896),
|
340 |
+
"Geometric Shapes Extended": range(128896, 129024),
|
341 |
+
"Supplemental Arrows-C": range(129024, 129280),
|
342 |
+
"Supplemental Symbols and Pictographs": range(129280, 129536),
|
343 |
+
"Chess Symbols": range(129536, 129648),
|
344 |
+
"Symbols and Pictographs Extended-A": range(129648, 129792),
|
345 |
+
"Symbols for Legacy Computing": range(129792, 130048),
|
346 |
+
"CJK Unified Ideographs Extension B": range(131072, 173792),
|
347 |
+
"CJK Unified Ideographs Extension C": range(173824, 177984),
|
348 |
+
"CJK Unified Ideographs Extension D": range(177984, 178208),
|
349 |
+
"CJK Unified Ideographs Extension E": range(178208, 183984),
|
350 |
+
"CJK Unified Ideographs Extension F": range(183984, 191472),
|
351 |
+
"CJK Compatibility Ideographs Supplement": range(194560, 195104),
|
352 |
+
"CJK Unified Ideographs Extension G": range(196608, 201552),
|
353 |
+
"CJK Unified Ideographs Extension H": range(201552, 205744),
|
354 |
+
"Tags": range(917504, 917632),
|
355 |
+
"Variation Selectors Supplement": range(917760, 918000),
|
356 |
+
"Supplementary Private Use Area-A": range(983040, 1048576),
|
357 |
+
"Supplementary Private Use Area-B": range(1048576, 1114112),
|
358 |
+
}
|
359 |
+
|
360 |
+
|
361 |
+
UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [
|
362 |
+
"Supplement",
|
363 |
+
"Extended",
|
364 |
+
"Extensions",
|
365 |
+
"Modifier",
|
366 |
+
"Marks",
|
367 |
+
"Punctuation",
|
368 |
+
"Symbols",
|
369 |
+
"Forms",
|
370 |
+
"Operators",
|
371 |
+
"Miscellaneous",
|
372 |
+
"Drawing",
|
373 |
+
"Block",
|
374 |
+
"Shapes",
|
375 |
+
"Supplemental",
|
376 |
+
"Tags",
|
377 |
+
]
|
378 |
+
|
379 |
+
RE_POSSIBLE_ENCODING_INDICATION = re_compile(
|
380 |
+
r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)",
|
381 |
+
IGNORECASE,
|
382 |
+
)
|
383 |
+
|
384 |
+
IANA_NO_ALIASES = [
|
385 |
+
"cp720",
|
386 |
+
"cp737",
|
387 |
+
"cp856",
|
388 |
+
"cp874",
|
389 |
+
"cp875",
|
390 |
+
"cp1006",
|
391 |
+
"koi8_r",
|
392 |
+
"koi8_t",
|
393 |
+
"koi8_u",
|
394 |
+
]
|
395 |
+
|
396 |
+
IANA_SUPPORTED: list[str] = sorted(
|
397 |
+
filter(
|
398 |
+
lambda x: x.endswith("_codec") is False
|
399 |
+
and x not in {"rot_13", "tactis", "mbcs"},
|
400 |
+
list(set(aliases.values())) + IANA_NO_ALIASES,
|
401 |
+
)
|
402 |
+
)
|
403 |
+
|
404 |
+
IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED)
|
405 |
+
|
406 |
+
# pre-computed code page that are similar using the function cp_similarity.
|
407 |
+
IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = {
|
408 |
+
"cp037": ["cp1026", "cp1140", "cp273", "cp500"],
|
409 |
+
"cp1026": ["cp037", "cp1140", "cp273", "cp500"],
|
410 |
+
"cp1125": ["cp866"],
|
411 |
+
"cp1140": ["cp037", "cp1026", "cp273", "cp500"],
|
412 |
+
"cp1250": ["iso8859_2"],
|
413 |
+
"cp1251": ["kz1048", "ptcp154"],
|
414 |
+
"cp1252": ["iso8859_15", "iso8859_9", "latin_1"],
|
415 |
+
"cp1253": ["iso8859_7"],
|
416 |
+
"cp1254": ["iso8859_15", "iso8859_9", "latin_1"],
|
417 |
+
"cp1257": ["iso8859_13"],
|
418 |
+
"cp273": ["cp037", "cp1026", "cp1140", "cp500"],
|
419 |
+
"cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"],
|
420 |
+
"cp500": ["cp037", "cp1026", "cp1140", "cp273"],
|
421 |
+
"cp850": ["cp437", "cp857", "cp858", "cp865"],
|
422 |
+
"cp857": ["cp850", "cp858", "cp865"],
|
423 |
+
"cp858": ["cp437", "cp850", "cp857", "cp865"],
|
424 |
+
"cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"],
|
425 |
+
"cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"],
|
426 |
+
"cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"],
|
427 |
+
"cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"],
|
428 |
+
"cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"],
|
429 |
+
"cp866": ["cp1125"],
|
430 |
+
"iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"],
|
431 |
+
"iso8859_11": ["tis_620"],
|
432 |
+
"iso8859_13": ["cp1257"],
|
433 |
+
"iso8859_14": [
|
434 |
+
"iso8859_10",
|
435 |
+
"iso8859_15",
|
436 |
+
"iso8859_16",
|
437 |
+
"iso8859_3",
|
438 |
+
"iso8859_9",
|
439 |
+
"latin_1",
|
440 |
+
],
|
441 |
+
"iso8859_15": [
|
442 |
+
"cp1252",
|
443 |
+
"cp1254",
|
444 |
+
"iso8859_10",
|
445 |
+
"iso8859_14",
|
446 |
+
"iso8859_16",
|
447 |
+
"iso8859_3",
|
448 |
+
"iso8859_9",
|
449 |
+
"latin_1",
|
450 |
+
],
|
451 |
+
"iso8859_16": [
|
452 |
+
"iso8859_14",
|
453 |
+
"iso8859_15",
|
454 |
+
"iso8859_2",
|
455 |
+
"iso8859_3",
|
456 |
+
"iso8859_9",
|
457 |
+
"latin_1",
|
458 |
+
],
|
459 |
+
"iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"],
|
460 |
+
"iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"],
|
461 |
+
"iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"],
|
462 |
+
"iso8859_7": ["cp1253"],
|
463 |
+
"iso8859_9": [
|
464 |
+
"cp1252",
|
465 |
+
"cp1254",
|
466 |
+
"cp1258",
|
467 |
+
"iso8859_10",
|
468 |
+
"iso8859_14",
|
469 |
+
"iso8859_15",
|
470 |
+
"iso8859_16",
|
471 |
+
"iso8859_3",
|
472 |
+
"iso8859_4",
|
473 |
+
"latin_1",
|
474 |
+
],
|
475 |
+
"kz1048": ["cp1251", "ptcp154"],
|
476 |
+
"latin_1": [
|
477 |
+
"cp1252",
|
478 |
+
"cp1254",
|
479 |
+
"cp1258",
|
480 |
+
"iso8859_10",
|
481 |
+
"iso8859_14",
|
482 |
+
"iso8859_15",
|
483 |
+
"iso8859_16",
|
484 |
+
"iso8859_3",
|
485 |
+
"iso8859_4",
|
486 |
+
"iso8859_9",
|
487 |
+
],
|
488 |
+
"mac_iceland": ["mac_roman", "mac_turkish"],
|
489 |
+
"mac_roman": ["mac_iceland", "mac_turkish"],
|
490 |
+
"mac_turkish": ["mac_iceland", "mac_roman"],
|
491 |
+
"ptcp154": ["cp1251", "kz1048"],
|
492 |
+
"tis_620": ["iso8859_11"],
|
493 |
+
}
|
494 |
+
|
495 |
+
|
496 |
+
CHARDET_CORRESPONDENCE: dict[str, str] = {
|
497 |
+
"iso2022_kr": "ISO-2022-KR",
|
498 |
+
"iso2022_jp": "ISO-2022-JP",
|
499 |
+
"euc_kr": "EUC-KR",
|
500 |
+
"tis_620": "TIS-620",
|
501 |
+
"utf_32": "UTF-32",
|
502 |
+
"euc_jp": "EUC-JP",
|
503 |
+
"koi8_r": "KOI8-R",
|
504 |
+
"iso8859_1": "ISO-8859-1",
|
505 |
+
"iso8859_2": "ISO-8859-2",
|
506 |
+
"iso8859_5": "ISO-8859-5",
|
507 |
+
"iso8859_6": "ISO-8859-6",
|
508 |
+
"iso8859_7": "ISO-8859-7",
|
509 |
+
"iso8859_8": "ISO-8859-8",
|
510 |
+
"utf_16": "UTF-16",
|
511 |
+
"cp855": "IBM855",
|
512 |
+
"mac_cyrillic": "MacCyrillic",
|
513 |
+
"gb2312": "GB2312",
|
514 |
+
"gb18030": "GB18030",
|
515 |
+
"cp932": "CP932",
|
516 |
+
"cp866": "IBM866",
|
517 |
+
"utf_8": "utf-8",
|
518 |
+
"utf_8_sig": "UTF-8-SIG",
|
519 |
+
"shift_jis": "SHIFT_JIS",
|
520 |
+
"big5": "Big5",
|
521 |
+
"cp1250": "windows-1250",
|
522 |
+
"cp1251": "windows-1251",
|
523 |
+
"cp1252": "Windows-1252",
|
524 |
+
"cp1253": "windows-1253",
|
525 |
+
"cp1255": "windows-1255",
|
526 |
+
"cp1256": "windows-1256",
|
527 |
+
"cp1254": "Windows-1254",
|
528 |
+
"cp949": "CP949",
|
529 |
+
}
|
530 |
+
|
531 |
+
|
532 |
+
COMMON_SAFE_ASCII_CHARACTERS: set[str] = {
|
533 |
+
"<",
|
534 |
+
">",
|
535 |
+
"=",
|
536 |
+
":",
|
537 |
+
"/",
|
538 |
+
"&",
|
539 |
+
";",
|
540 |
+
"{",
|
541 |
+
"}",
|
542 |
+
"[",
|
543 |
+
"]",
|
544 |
+
",",
|
545 |
+
"|",
|
546 |
+
'"',
|
547 |
+
"-",
|
548 |
+
"(",
|
549 |
+
")",
|
550 |
+
}
|
551 |
+
|
552 |
+
|
553 |
+
KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"}
|
554 |
+
ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"}
|
555 |
+
|
556 |
+
# Logging LEVEL below DEBUG
|
557 |
+
TRACE: int = 5
|
558 |
+
|
559 |
+
|
560 |
+
# Language label that contain the em dash "—"
|
561 |
+
# character are to be considered alternative seq to origin
|
562 |
+
FREQUENCIES: dict[str, list[str]] = {
|
563 |
+
"English": [
|
564 |
+
"e",
|
565 |
+
"a",
|
566 |
+
"t",
|
567 |
+
"i",
|
568 |
+
"o",
|
569 |
+
"n",
|
570 |
+
"s",
|
571 |
+
"r",
|
572 |
+
"h",
|
573 |
+
"l",
|
574 |
+
"d",
|
575 |
+
"c",
|
576 |
+
"u",
|
577 |
+
"m",
|
578 |
+
"f",
|
579 |
+
"p",
|
580 |
+
"g",
|
581 |
+
"w",
|
582 |
+
"y",
|
583 |
+
"b",
|
584 |
+
"v",
|
585 |
+
"k",
|
586 |
+
"x",
|
587 |
+
"j",
|
588 |
+
"z",
|
589 |
+
"q",
|
590 |
+
],
|
591 |
+
"English—": [
|
592 |
+
"e",
|
593 |
+
"a",
|
594 |
+
"t",
|
595 |
+
"i",
|
596 |
+
"o",
|
597 |
+
"n",
|
598 |
+
"s",
|
599 |
+
"r",
|
600 |
+
"h",
|
601 |
+
"l",
|
602 |
+
"d",
|
603 |
+
"c",
|
604 |
+
"m",
|
605 |
+
"u",
|
606 |
+
"f",
|
607 |
+
"p",
|
608 |
+
"g",
|
609 |
+
"w",
|
610 |
+
"b",
|
611 |
+
"y",
|
612 |
+
"v",
|
613 |
+
"k",
|
614 |
+
"j",
|
615 |
+
"x",
|
616 |
+
"z",
|
617 |
+
"q",
|
618 |
+
],
|
619 |
+
"German": [
|
620 |
+
"e",
|
621 |
+
"n",
|
622 |
+
"i",
|
623 |
+
"r",
|
624 |
+
"s",
|
625 |
+
"t",
|
626 |
+
"a",
|
627 |
+
"d",
|
628 |
+
"h",
|
629 |
+
"u",
|
630 |
+
"l",
|
631 |
+
"g",
|
632 |
+
"o",
|
633 |
+
"c",
|
634 |
+
"m",
|
635 |
+
"b",
|
636 |
+
"f",
|
637 |
+
"k",
|
638 |
+
"w",
|
639 |
+
"z",
|
640 |
+
"p",
|
641 |
+
"v",
|
642 |
+
"ü",
|
643 |
+
"ä",
|
644 |
+
"ö",
|
645 |
+
"j",
|
646 |
+
],
|
647 |
+
"French": [
|
648 |
+
"e",
|
649 |
+
"a",
|
650 |
+
"s",
|
651 |
+
"n",
|
652 |
+
"i",
|
653 |
+
"t",
|
654 |
+
"r",
|
655 |
+
"l",
|
656 |
+
"u",
|
657 |
+
"o",
|
658 |
+
"d",
|
659 |
+
"c",
|
660 |
+
"p",
|
661 |
+
"m",
|
662 |
+
"é",
|
663 |
+
"v",
|
664 |
+
"g",
|
665 |
+
"f",
|
666 |
+
"b",
|
667 |
+
"h",
|
668 |
+
"q",
|
669 |
+
"à",
|
670 |
+
"x",
|
671 |
+
"è",
|
672 |
+
"y",
|
673 |
+
"j",
|
674 |
+
],
|
675 |
+
"Dutch": [
|
676 |
+
"e",
|
677 |
+
"n",
|
678 |
+
"a",
|
679 |
+
"i",
|
680 |
+
"r",
|
681 |
+
"t",
|
682 |
+
"o",
|
683 |
+
"d",
|
684 |
+
"s",
|
685 |
+
"l",
|
686 |
+
"g",
|
687 |
+
"h",
|
688 |
+
"v",
|
689 |
+
"m",
|
690 |
+
"u",
|
691 |
+
"k",
|
692 |
+
"c",
|
693 |
+
"p",
|
694 |
+
"b",
|
695 |
+
"w",
|
696 |
+
"j",
|
697 |
+
"z",
|
698 |
+
"f",
|
699 |
+
"y",
|
700 |
+
"x",
|
701 |
+
"ë",
|
702 |
+
],
|
703 |
+
"Italian": [
|
704 |
+
"e",
|
705 |
+
"i",
|
706 |
+
"a",
|
707 |
+
"o",
|
708 |
+
"n",
|
709 |
+
"l",
|
710 |
+
"t",
|
711 |
+
"r",
|
712 |
+
"s",
|
713 |
+
"c",
|
714 |
+
"d",
|
715 |
+
"u",
|
716 |
+
"p",
|
717 |
+
"m",
|
718 |
+
"g",
|
719 |
+
"v",
|
720 |
+
"f",
|
721 |
+
"b",
|
722 |
+
"z",
|
723 |
+
"h",
|
724 |
+
"q",
|
725 |
+
"è",
|
726 |
+
"à",
|
727 |
+
"k",
|
728 |
+
"y",
|
729 |
+
"ò",
|
730 |
+
],
|
731 |
+
"Polish": [
|
732 |
+
"a",
|
733 |
+
"i",
|
734 |
+
"o",
|
735 |
+
"e",
|
736 |
+
"n",
|
737 |
+
"r",
|
738 |
+
"z",
|
739 |
+
"w",
|
740 |
+
"s",
|
741 |
+
"c",
|
742 |
+
"t",
|
743 |
+
"k",
|
744 |
+
"y",
|
745 |
+
"d",
|
746 |
+
"p",
|
747 |
+
"m",
|
748 |
+
"u",
|
749 |
+
"l",
|
750 |
+
"j",
|
751 |
+
"ł",
|
752 |
+
"g",
|
753 |
+
"b",
|
754 |
+
"h",
|
755 |
+
"ą",
|
756 |
+
"ę",
|
757 |
+
"ó",
|
758 |
+
],
|
759 |
+
"Spanish": [
|
760 |
+
"e",
|
761 |
+
"a",
|
762 |
+
"o",
|
763 |
+
"n",
|
764 |
+
"s",
|
765 |
+
"r",
|
766 |
+
"i",
|
767 |
+
"l",
|
768 |
+
"d",
|
769 |
+
"t",
|
770 |
+
"c",
|
771 |
+
"u",
|
772 |
+
"m",
|
773 |
+
"p",
|
774 |
+
"b",
|
775 |
+
"g",
|
776 |
+
"v",
|
777 |
+
"f",
|
778 |
+
"y",
|
779 |
+
"ó",
|
780 |
+
"h",
|
781 |
+
"q",
|
782 |
+
"í",
|
783 |
+
"j",
|
784 |
+
"z",
|
785 |
+
"á",
|
786 |
+
],
|
787 |
+
"Russian": [
|
788 |
+
"о",
|
789 |
+
"а",
|
790 |
+
"е",
|
791 |
+
"и",
|
792 |
+
"н",
|
793 |
+
"с",
|
794 |
+
"т",
|
795 |
+
"р",
|
796 |
+
"в",
|
797 |
+
"л",
|
798 |
+
"к",
|
799 |
+
"м",
|
800 |
+
"д",
|
801 |
+
"п",
|
802 |
+
"у",
|
803 |
+
"г",
|
804 |
+
"я",
|
805 |
+
"ы",
|
806 |
+
"з",
|
807 |
+
"б",
|
808 |
+
"й",
|
809 |
+
"ь",
|
810 |
+
"ч",
|
811 |
+
"х",
|
812 |
+
"ж",
|
813 |
+
"ц",
|
814 |
+
],
|
815 |
+
# Jap-Kanji
|
816 |
+
"Japanese": [
|
817 |
+
"人",
|
818 |
+
"一",
|
819 |
+
"大",
|
820 |
+
"亅",
|
821 |
+
"丁",
|
822 |
+
"丨",
|
823 |
+
"竹",
|
824 |
+
"笑",
|
825 |
+
"口",
|
826 |
+
"日",
|
827 |
+
"今",
|
828 |
+
"二",
|
829 |
+
"彳",
|
830 |
+
"行",
|
831 |
+
"十",
|
832 |
+
"土",
|
833 |
+
"丶",
|
834 |
+
"寸",
|
835 |
+
"寺",
|
836 |
+
"時",
|
837 |
+
"乙",
|
838 |
+
"丿",
|
839 |
+
"乂",
|
840 |
+
"气",
|
841 |
+
"気",
|
842 |
+
"冂",
|
843 |
+
"巾",
|
844 |
+
"亠",
|
845 |
+
"市",
|
846 |
+
"目",
|
847 |
+
"儿",
|
848 |
+
"見",
|
849 |
+
"八",
|
850 |
+
"小",
|
851 |
+
"凵",
|
852 |
+
"県",
|
853 |
+
"月",
|
854 |
+
"彐",
|
855 |
+
"門",
|
856 |
+
"間",
|
857 |
+
"木",
|
858 |
+
"東",
|
859 |
+
"山",
|
860 |
+
"出",
|
861 |
+
"本",
|
862 |
+
"中",
|
863 |
+
"刀",
|
864 |
+
"分",
|
865 |
+
"耳",
|
866 |
+
"又",
|
867 |
+
"取",
|
868 |
+
"最",
|
869 |
+
"言",
|
870 |
+
"田",
|
871 |
+
"心",
|
872 |
+
"思",
|
873 |
+
"刂",
|
874 |
+
"前",
|
875 |
+
"京",
|
876 |
+
"尹",
|
877 |
+
"事",
|
878 |
+
"生",
|
879 |
+
"厶",
|
880 |
+
"云",
|
881 |
+
"会",
|
882 |
+
"未",
|
883 |
+
"来",
|
884 |
+
"白",
|
885 |
+
"冫",
|
886 |
+
"楽",
|
887 |
+
"灬",
|
888 |
+
"馬",
|
889 |
+
"尸",
|
890 |
+
"尺",
|
891 |
+
"駅",
|
892 |
+
"明",
|
893 |
+
"耂",
|
894 |
+
"者",
|
895 |
+
"了",
|
896 |
+
"阝",
|
897 |
+
"都",
|
898 |
+
"高",
|
899 |
+
"卜",
|
900 |
+
"占",
|
901 |
+
"厂",
|
902 |
+
"广",
|
903 |
+
"店",
|
904 |
+
"子",
|
905 |
+
"申",
|
906 |
+
"奄",
|
907 |
+
"亻",
|
908 |
+
"俺",
|
909 |
+
"上",
|
910 |
+
"方",
|
911 |
+
"冖",
|
912 |
+
"学",
|
913 |
+
"衣",
|
914 |
+
"艮",
|
915 |
+
"食",
|
916 |
+
"自",
|
917 |
+
],
|
918 |
+
# Jap-Katakana
|
919 |
+
"Japanese—": [
|
920 |
+
"ー",
|
921 |
+
"ン",
|
922 |
+
"ス",
|
923 |
+
"・",
|
924 |
+
"ル",
|
925 |
+
"ト",
|
926 |
+
"リ",
|
927 |
+
"イ",
|
928 |
+
"ア",
|
929 |
+
"ラ",
|
930 |
+
"ッ",
|
931 |
+
"ク",
|
932 |
+
"ド",
|
933 |
+
"シ",
|
934 |
+
"レ",
|
935 |
+
"ジ",
|
936 |
+
"タ",
|
937 |
+
"フ",
|
938 |
+
"ロ",
|
939 |
+
"カ",
|
940 |
+
"テ",
|
941 |
+
"マ",
|
942 |
+
"ィ",
|
943 |
+
"グ",
|
944 |
+
"バ",
|
945 |
+
"ム",
|
946 |
+
"プ",
|
947 |
+
"オ",
|
948 |
+
"コ",
|
949 |
+
"デ",
|
950 |
+
"ニ",
|
951 |
+
"ウ",
|
952 |
+
"メ",
|
953 |
+
"サ",
|
954 |
+
"ビ",
|
955 |
+
"ナ",
|
956 |
+
"ブ",
|
957 |
+
"ャ",
|
958 |
+
"エ",
|
959 |
+
"ュ",
|
960 |
+
"チ",
|
961 |
+
"キ",
|
962 |
+
"ズ",
|
963 |
+
"ダ",
|
964 |
+
"パ",
|
965 |
+
"ミ",
|
966 |
+
"ェ",
|
967 |
+
"ョ",
|
968 |
+
"ハ",
|
969 |
+
"セ",
|
970 |
+
"ベ",
|
971 |
+
"ガ",
|
972 |
+
"モ",
|
973 |
+
"ツ",
|
974 |
+
"ネ",
|
975 |
+
"ボ",
|
976 |
+
"ソ",
|
977 |
+
"ノ",
|
978 |
+
"ァ",
|
979 |
+
"ヴ",
|
980 |
+
"ワ",
|
981 |
+
"ポ",
|
982 |
+
"ペ",
|
983 |
+
"ピ",
|
984 |
+
"ケ",
|
985 |
+
"ゴ",
|
986 |
+
"ギ",
|
987 |
+
"ザ",
|
988 |
+
"ホ",
|
989 |
+
"ゲ",
|
990 |
+
"ォ",
|
991 |
+
"ヤ",
|
992 |
+
"ヒ",
|
993 |
+
"ユ",
|
994 |
+
"ヨ",
|
995 |
+
"ヘ",
|
996 |
+
"ゼ",
|
997 |
+
"ヌ",
|
998 |
+
"ゥ",
|
999 |
+
"ゾ",
|
1000 |
+
"ヶ",
|
1001 |
+
"ヂ",
|
1002 |
+
"ヲ",
|
1003 |
+
"ヅ",
|
1004 |
+
"ヵ",
|
1005 |
+
"ヱ",
|
1006 |
+
"ヰ",
|
1007 |
+
"ヮ",
|
1008 |
+
"ヽ",
|
1009 |
+
"゠",
|
1010 |
+
"ヾ",
|
1011 |
+
"ヷ",
|
1012 |
+
"ヿ",
|
1013 |
+
"ヸ",
|
1014 |
+
"ヹ",
|
1015 |
+
"ヺ",
|
1016 |
+
],
|
1017 |
+
# Jap-Hiragana
|
1018 |
+
"Japanese——": [
|
1019 |
+
"の",
|
1020 |
+
"に",
|
1021 |
+
"る",
|
1022 |
+
"た",
|
1023 |
+
"と",
|
1024 |
+
"は",
|
1025 |
+
"し",
|
1026 |
+
"い",
|
1027 |
+
"を",
|
1028 |
+
"で",
|
1029 |
+
"て",
|
1030 |
+
"が",
|
1031 |
+
"な",
|
1032 |
+
"れ",
|
1033 |
+
"か",
|
1034 |
+
"ら",
|
1035 |
+
"さ",
|
1036 |
+
"っ",
|
1037 |
+
"り",
|
1038 |
+
"す",
|
1039 |
+
"あ",
|
1040 |
+
"も",
|
1041 |
+
"こ",
|
1042 |
+
"ま",
|
1043 |
+
"う",
|
1044 |
+
"く",
|
1045 |
+
"よ",
|
1046 |
+
"き",
|
1047 |
+
"ん",
|
1048 |
+
"め",
|
1049 |
+
"お",
|
1050 |
+
"け",
|
1051 |
+
"そ",
|
1052 |
+
"つ",
|
1053 |
+
"だ",
|
1054 |
+
"や",
|
1055 |
+
"え",
|
1056 |
+
"ど",
|
1057 |
+
"わ",
|
1058 |
+
"ち",
|
1059 |
+
"み",
|
1060 |
+
"せ",
|
1061 |
+
"じ",
|
1062 |
+
"ば",
|
1063 |
+
"へ",
|
1064 |
+
"び",
|
1065 |
+
"ず",
|
1066 |
+
"ろ",
|
1067 |
+
"ほ",
|
1068 |
+
"げ",
|
1069 |
+
"む",
|
1070 |
+
"べ",
|
1071 |
+
"ひ",
|
1072 |
+
"ょ",
|
1073 |
+
"ゆ",
|
1074 |
+
"ぶ",
|
1075 |
+
"ご",
|
1076 |
+
"ゃ",
|
1077 |
+
"ね",
|
1078 |
+
"ふ",
|
1079 |
+
"ぐ",
|
1080 |
+
"ぎ",
|
1081 |
+
"ぼ",
|
1082 |
+
"ゅ",
|
1083 |
+
"づ",
|
1084 |
+
"ざ",
|
1085 |
+
"ぞ",
|
1086 |
+
"ぬ",
|
1087 |
+
"ぜ",
|
1088 |
+
"ぱ",
|
1089 |
+
"ぽ",
|
1090 |
+
"ぷ",
|
1091 |
+
"ぴ",
|
1092 |
+
"ぃ",
|
1093 |
+
"ぁ",
|
1094 |
+
"ぇ",
|
1095 |
+
"ぺ",
|
1096 |
+
"ゞ",
|
1097 |
+
"ぢ",
|
1098 |
+
"ぉ",
|
1099 |
+
"ぅ",
|
1100 |
+
"ゐ",
|
1101 |
+
"ゝ",
|
1102 |
+
"ゑ",
|
1103 |
+
"゛",
|
1104 |
+
"゜",
|
1105 |
+
"ゎ",
|
1106 |
+
"ゔ",
|
1107 |
+
"゚",
|
1108 |
+
"ゟ",
|
1109 |
+
"゙",
|
1110 |
+
"ゕ",
|
1111 |
+
"ゖ",
|
1112 |
+
],
|
1113 |
+
"Portuguese": [
|
1114 |
+
"a",
|
1115 |
+
"e",
|
1116 |
+
"o",
|
1117 |
+
"s",
|
1118 |
+
"i",
|
1119 |
+
"r",
|
1120 |
+
"d",
|
1121 |
+
"n",
|
1122 |
+
"t",
|
1123 |
+
"m",
|
1124 |
+
"u",
|
1125 |
+
"c",
|
1126 |
+
"l",
|
1127 |
+
"p",
|
1128 |
+
"g",
|
1129 |
+
"v",
|
1130 |
+
"b",
|
1131 |
+
"f",
|
1132 |
+
"h",
|
1133 |
+
"ã",
|
1134 |
+
"q",
|
1135 |
+
"é",
|
1136 |
+
"ç",
|
1137 |
+
"á",
|
1138 |
+
"z",
|
1139 |
+
"í",
|
1140 |
+
],
|
1141 |
+
"Swedish": [
|
1142 |
+
"e",
|
1143 |
+
"a",
|
1144 |
+
"n",
|
1145 |
+
"r",
|
1146 |
+
"t",
|
1147 |
+
"s",
|
1148 |
+
"i",
|
1149 |
+
"l",
|
1150 |
+
"d",
|
1151 |
+
"o",
|
1152 |
+
"m",
|
1153 |
+
"k",
|
1154 |
+
"g",
|
1155 |
+
"v",
|
1156 |
+
"h",
|
1157 |
+
"f",
|
1158 |
+
"u",
|
1159 |
+
"p",
|
1160 |
+
"ä",
|
1161 |
+
"c",
|
1162 |
+
"b",
|
1163 |
+
"ö",
|
1164 |
+
"å",
|
1165 |
+
"y",
|
1166 |
+
"j",
|
1167 |
+
"x",
|
1168 |
+
],
|
1169 |
+
"Chinese": [
|
1170 |
+
"的",
|
1171 |
+
"一",
|
1172 |
+
"是",
|
1173 |
+
"不",
|
1174 |
+
"了",
|
1175 |
+
"在",
|
1176 |
+
"人",
|
1177 |
+
"有",
|
1178 |
+
"我",
|
1179 |
+
"他",
|
1180 |
+
"这",
|
1181 |
+
"个",
|
1182 |
+
"们",
|
1183 |
+
"中",
|
1184 |
+
"来",
|
1185 |
+
"上",
|
1186 |
+
"大",
|
1187 |
+
"为",
|
1188 |
+
"和",
|
1189 |
+
"国",
|
1190 |
+
"地",
|
1191 |
+
"到",
|
1192 |
+
"以",
|
1193 |
+
"说",
|
1194 |
+
"时",
|
1195 |
+
"要",
|
1196 |
+
"就",
|
1197 |
+
"出",
|
1198 |
+
"会",
|
1199 |
+
"可",
|
1200 |
+
"也",
|
1201 |
+
"你",
|
1202 |
+
"对",
|
1203 |
+
"生",
|
1204 |
+
"能",
|
1205 |
+
"而",
|
1206 |
+
"子",
|
1207 |
+
"那",
|
1208 |
+
"得",
|
1209 |
+
"于",
|
1210 |
+
"着",
|
1211 |
+
"下",
|
1212 |
+
"自",
|
1213 |
+
"之",
|
1214 |
+
"年",
|
1215 |
+
"过",
|
1216 |
+
"发",
|
1217 |
+
"后",
|
1218 |
+
"作",
|
1219 |
+
"里",
|
1220 |
+
"用",
|
1221 |
+
"道",
|
1222 |
+
"行",
|
1223 |
+
"所",
|
1224 |
+
"然",
|
1225 |
+
"家",
|
1226 |
+
"种",
|
1227 |
+
"事",
|
1228 |
+
"成",
|
1229 |
+
"方",
|
1230 |
+
"多",
|
1231 |
+
"经",
|
1232 |
+
"么",
|
1233 |
+
"去",
|
1234 |
+
"法",
|
1235 |
+
"学",
|
1236 |
+
"如",
|
1237 |
+
"都",
|
1238 |
+
"同",
|
1239 |
+
"现",
|
1240 |
+
"当",
|
1241 |
+
"没",
|
1242 |
+
"动",
|
1243 |
+
"面",
|
1244 |
+
"起",
|
1245 |
+
"看",
|
1246 |
+
"定",
|
1247 |
+
"天",
|
1248 |
+
"分",
|
1249 |
+
"还",
|
1250 |
+
"进",
|
1251 |
+
"好",
|
1252 |
+
"小",
|
1253 |
+
"部",
|
1254 |
+
"其",
|
1255 |
+
"些",
|
1256 |
+
"主",
|
1257 |
+
"样",
|
1258 |
+
"理",
|
1259 |
+
"心",
|
1260 |
+
"她",
|
1261 |
+
"本",
|
1262 |
+
"前",
|
1263 |
+
"开",
|
1264 |
+
"但",
|
1265 |
+
"因",
|
1266 |
+
"只",
|
1267 |
+
"从",
|
1268 |
+
"想",
|
1269 |
+
"实",
|
1270 |
+
],
|
1271 |
+
"Ukrainian": [
|
1272 |
+
"о",
|
1273 |
+
"а",
|
1274 |
+
"н",
|
1275 |
+
"і",
|
1276 |
+
"и",
|
1277 |
+
"р",
|
1278 |
+
"в",
|
1279 |
+
"т",
|
1280 |
+
"е",
|
1281 |
+
"с",
|
1282 |
+
"к",
|
1283 |
+
"л",
|
1284 |
+
"у",
|
1285 |
+
"д",
|
1286 |
+
"м",
|
1287 |
+
"п",
|
1288 |
+
"з",
|
1289 |
+
"я",
|
1290 |
+
"ь",
|
1291 |
+
"б",
|
1292 |
+
"г",
|
1293 |
+
"й",
|
1294 |
+
"ч",
|
1295 |
+
"х",
|
1296 |
+
"ц",
|
1297 |
+
"ї",
|
1298 |
+
],
|
1299 |
+
"Norwegian": [
|
1300 |
+
"e",
|
1301 |
+
"r",
|
1302 |
+
"n",
|
1303 |
+
"t",
|
1304 |
+
"a",
|
1305 |
+
"s",
|
1306 |
+
"i",
|
1307 |
+
"o",
|
1308 |
+
"l",
|
1309 |
+
"d",
|
1310 |
+
"g",
|
1311 |
+
"k",
|
1312 |
+
"m",
|
1313 |
+
"v",
|
1314 |
+
"f",
|
1315 |
+
"p",
|
1316 |
+
"u",
|
1317 |
+
"b",
|
1318 |
+
"h",
|
1319 |
+
"å",
|
1320 |
+
"y",
|
1321 |
+
"j",
|
1322 |
+
"ø",
|
1323 |
+
"c",
|
1324 |
+
"æ",
|
1325 |
+
"w",
|
1326 |
+
],
|
1327 |
+
"Finnish": [
|
1328 |
+
"a",
|
1329 |
+
"i",
|
1330 |
+
"n",
|
1331 |
+
"t",
|
1332 |
+
"e",
|
1333 |
+
"s",
|
1334 |
+
"l",
|
1335 |
+
"o",
|
1336 |
+
"u",
|
1337 |
+
"k",
|
1338 |
+
"ä",
|
1339 |
+
"m",
|
1340 |
+
"r",
|
1341 |
+
"v",
|
1342 |
+
"j",
|
1343 |
+
"h",
|
1344 |
+
"p",
|
1345 |
+
"y",
|
1346 |
+
"d",
|
1347 |
+
"ö",
|
1348 |
+
"g",
|
1349 |
+
"c",
|
1350 |
+
"b",
|
1351 |
+
"f",
|
1352 |
+
"w",
|
1353 |
+
"z",
|
1354 |
+
],
|
1355 |
+
"Vietnamese": [
|
1356 |
+
"n",
|
1357 |
+
"h",
|
1358 |
+
"t",
|
1359 |
+
"i",
|
1360 |
+
"c",
|
1361 |
+
"g",
|
1362 |
+
"a",
|
1363 |
+
"o",
|
1364 |
+
"u",
|
1365 |
+
"m",
|
1366 |
+
"l",
|
1367 |
+
"r",
|
1368 |
+
"à",
|
1369 |
+
"đ",
|
1370 |
+
"s",
|
1371 |
+
"e",
|
1372 |
+
"v",
|
1373 |
+
"p",
|
1374 |
+
"b",
|
1375 |
+
"y",
|
1376 |
+
"ư",
|
1377 |
+
"d",
|
1378 |
+
"á",
|
1379 |
+
"k",
|
1380 |
+
"ộ",
|
1381 |
+
"ế",
|
1382 |
+
],
|
1383 |
+
"Czech": [
|
1384 |
+
"o",
|
1385 |
+
"e",
|
1386 |
+
"a",
|
1387 |
+
"n",
|
1388 |
+
"t",
|
1389 |
+
"s",
|
1390 |
+
"i",
|
1391 |
+
"l",
|
1392 |
+
"v",
|
1393 |
+
"r",
|
1394 |
+
"k",
|
1395 |
+
"d",
|
1396 |
+
"u",
|
1397 |
+
"m",
|
1398 |
+
"p",
|
1399 |
+
"í",
|
1400 |
+
"c",
|
1401 |
+
"h",
|
1402 |
+
"z",
|
1403 |
+
"á",
|
1404 |
+
"y",
|
1405 |
+
"j",
|
1406 |
+
"b",
|
1407 |
+
"ě",
|
1408 |
+
"é",
|
1409 |
+
"ř",
|
1410 |
+
],
|
1411 |
+
"Hungarian": [
|
1412 |
+
"e",
|
1413 |
+
"a",
|
1414 |
+
"t",
|
1415 |
+
"l",
|
1416 |
+
"s",
|
1417 |
+
"n",
|
1418 |
+
"k",
|
1419 |
+
"r",
|
1420 |
+
"i",
|
1421 |
+
"o",
|
1422 |
+
"z",
|
1423 |
+
"á",
|
1424 |
+
"é",
|
1425 |
+
"g",
|
1426 |
+
"m",
|
1427 |
+
"b",
|
1428 |
+
"y",
|
1429 |
+
"v",
|
1430 |
+
"d",
|
1431 |
+
"h",
|
1432 |
+
"u",
|
1433 |
+
"p",
|
1434 |
+
"j",
|
1435 |
+
"ö",
|
1436 |
+
"f",
|
1437 |
+
"c",
|
1438 |
+
],
|
1439 |
+
"Korean": [
|
1440 |
+
"이",
|
1441 |
+
"다",
|
1442 |
+
"에",
|
1443 |
+
"의",
|
1444 |
+
"는",
|
1445 |
+
"로",
|
1446 |
+
"하",
|
1447 |
+
"을",
|
1448 |
+
"가",
|
1449 |
+
"고",
|
1450 |
+
"지",
|
1451 |
+
"서",
|
1452 |
+
"한",
|
1453 |
+
"은",
|
1454 |
+
"기",
|
1455 |
+
"으",
|
1456 |
+
"년",
|
1457 |
+
"대",
|
1458 |
+
"사",
|
1459 |
+
"시",
|
1460 |
+
"를",
|
1461 |
+
"리",
|
1462 |
+
"도",
|
1463 |
+
"인",
|
1464 |
+
"스",
|
1465 |
+
"일",
|
1466 |
+
],
|
1467 |
+
"Indonesian": [
|
1468 |
+
"a",
|
1469 |
+
"n",
|
1470 |
+
"e",
|
1471 |
+
"i",
|
1472 |
+
"r",
|
1473 |
+
"t",
|
1474 |
+
"u",
|
1475 |
+
"s",
|
1476 |
+
"d",
|
1477 |
+
"k",
|
1478 |
+
"m",
|
1479 |
+
"l",
|
1480 |
+
"g",
|
1481 |
+
"p",
|
1482 |
+
"b",
|
1483 |
+
"o",
|
1484 |
+
"h",
|
1485 |
+
"y",
|
1486 |
+
"j",
|
1487 |
+
"c",
|
1488 |
+
"w",
|
1489 |
+
"f",
|
1490 |
+
"v",
|
1491 |
+
"z",
|
1492 |
+
"x",
|
1493 |
+
"q",
|
1494 |
+
],
|
1495 |
+
"Turkish": [
|
1496 |
+
"a",
|
1497 |
+
"e",
|
1498 |
+
"i",
|
1499 |
+
"n",
|
1500 |
+
"r",
|
1501 |
+
"l",
|
1502 |
+
"ı",
|
1503 |
+
"k",
|
1504 |
+
"d",
|
1505 |
+
"t",
|
1506 |
+
"s",
|
1507 |
+
"m",
|
1508 |
+
"y",
|
1509 |
+
"u",
|
1510 |
+
"o",
|
1511 |
+
"b",
|
1512 |
+
"ü",
|
1513 |
+
"ş",
|
1514 |
+
"v",
|
1515 |
+
"g",
|
1516 |
+
"z",
|
1517 |
+
"h",
|
1518 |
+
"c",
|
1519 |
+
"p",
|
1520 |
+
"ç",
|
1521 |
+
"ğ",
|
1522 |
+
],
|
1523 |
+
"Romanian": [
|
1524 |
+
"e",
|
1525 |
+
"i",
|
1526 |
+
"a",
|
1527 |
+
"r",
|
1528 |
+
"n",
|
1529 |
+
"t",
|
1530 |
+
"u",
|
1531 |
+
"l",
|
1532 |
+
"o",
|
1533 |
+
"c",
|
1534 |
+
"s",
|
1535 |
+
"d",
|
1536 |
+
"p",
|
1537 |
+
"m",
|
1538 |
+
"ă",
|
1539 |
+
"f",
|
1540 |
+
"v",
|
1541 |
+
"î",
|
1542 |
+
"g",
|
1543 |
+
"b",
|
1544 |
+
"ș",
|
1545 |
+
"ț",
|
1546 |
+
"z",
|
1547 |
+
"h",
|
1548 |
+
"â",
|
1549 |
+
"j",
|
1550 |
+
],
|
1551 |
+
"Farsi": [
|
1552 |
+
"ا",
|
1553 |
+
"ی",
|
1554 |
+
"ر",
|
1555 |
+
"د",
|
1556 |
+
"ن",
|
1557 |
+
"ه",
|
1558 |
+
"و",
|
1559 |
+
"م",
|
1560 |
+
"ت",
|
1561 |
+
"ب",
|
1562 |
+
"س",
|
1563 |
+
"ل",
|
1564 |
+
"ک",
|
1565 |
+
"ش",
|
1566 |
+
"ز",
|
1567 |
+
"ف",
|
1568 |
+
"گ",
|
1569 |
+
"ع",
|
1570 |
+
"خ",
|
1571 |
+
"ق",
|
1572 |
+
"ج",
|
1573 |
+
"آ",
|
1574 |
+
"پ",
|
1575 |
+
"ح",
|
1576 |
+
"ط",
|
1577 |
+
"ص",
|
1578 |
+
],
|
1579 |
+
"Arabic": [
|
1580 |
+
"ا",
|
1581 |
+
"ل",
|
1582 |
+
"ي",
|
1583 |
+
"م",
|
1584 |
+
"و",
|
1585 |
+
"ن",
|
1586 |
+
"ر",
|
1587 |
+
"ت",
|
1588 |
+
"ب",
|
1589 |
+
"ة",
|
1590 |
+
"ع",
|
1591 |
+
"د",
|
1592 |
+
"س",
|
1593 |
+
"ف",
|
1594 |
+
"ه",
|
1595 |
+
"ك",
|
1596 |
+
"ق",
|
1597 |
+
"أ",
|
1598 |
+
"ح",
|
1599 |
+
"ج",
|
1600 |
+
"ش",
|
1601 |
+
"ط",
|
1602 |
+
"ص",
|
1603 |
+
"ى",
|
1604 |
+
"خ",
|
1605 |
+
"إ",
|
1606 |
+
],
|
1607 |
+
"Danish": [
|
1608 |
+
"e",
|
1609 |
+
"r",
|
1610 |
+
"n",
|
1611 |
+
"t",
|
1612 |
+
"a",
|
1613 |
+
"i",
|
1614 |
+
"s",
|
1615 |
+
"d",
|
1616 |
+
"l",
|
1617 |
+
"o",
|
1618 |
+
"g",
|
1619 |
+
"m",
|
1620 |
+
"k",
|
1621 |
+
"f",
|
1622 |
+
"v",
|
1623 |
+
"u",
|
1624 |
+
"b",
|
1625 |
+
"h",
|
1626 |
+
"p",
|
1627 |
+
"å",
|
1628 |
+
"y",
|
1629 |
+
"ø",
|
1630 |
+
"æ",
|
1631 |
+
"c",
|
1632 |
+
"j",
|
1633 |
+
"w",
|
1634 |
+
],
|
1635 |
+
"Serbian": [
|
1636 |
+
"а",
|
1637 |
+
"и",
|
1638 |
+
"о",
|
1639 |
+
"е",
|
1640 |
+
"н",
|
1641 |
+
"р",
|
1642 |
+
"с",
|
1643 |
+
"у",
|
1644 |
+
"т",
|
1645 |
+
"к",
|
1646 |
+
"ј",
|
1647 |
+
"в",
|
1648 |
+
"д",
|
1649 |
+
"м",
|
1650 |
+
"п",
|
1651 |
+
"л",
|
1652 |
+
"г",
|
1653 |
+
"з",
|
1654 |
+
"б",
|
1655 |
+
"a",
|
1656 |
+
"i",
|
1657 |
+
"e",
|
1658 |
+
"o",
|
1659 |
+
"n",
|
1660 |
+
"ц",
|
1661 |
+
"ш",
|
1662 |
+
],
|
1663 |
+
"Lithuanian": [
|
1664 |
+
"i",
|
1665 |
+
"a",
|
1666 |
+
"s",
|
1667 |
+
"o",
|
1668 |
+
"r",
|
1669 |
+
"e",
|
1670 |
+
"t",
|
1671 |
+
"n",
|
1672 |
+
"u",
|
1673 |
+
"k",
|
1674 |
+
"m",
|
1675 |
+
"l",
|
1676 |
+
"p",
|
1677 |
+
"v",
|
1678 |
+
"d",
|
1679 |
+
"j",
|
1680 |
+
"g",
|
1681 |
+
"ė",
|
1682 |
+
"b",
|
1683 |
+
"y",
|
1684 |
+
"ų",
|
1685 |
+
"š",
|
1686 |
+
"ž",
|
1687 |
+
"c",
|
1688 |
+
"ą",
|
1689 |
+
"į",
|
1690 |
+
],
|
1691 |
+
"Slovene": [
|
1692 |
+
"e",
|
1693 |
+
"a",
|
1694 |
+
"i",
|
1695 |
+
"o",
|
1696 |
+
"n",
|
1697 |
+
"r",
|
1698 |
+
"s",
|
1699 |
+
"l",
|
1700 |
+
"t",
|
1701 |
+
"j",
|
1702 |
+
"v",
|
1703 |
+
"k",
|
1704 |
+
"d",
|
1705 |
+
"p",
|
1706 |
+
"m",
|
1707 |
+
"u",
|
1708 |
+
"z",
|
1709 |
+
"b",
|
1710 |
+
"g",
|
1711 |
+
"h",
|
1712 |
+
"č",
|
1713 |
+
"c",
|
1714 |
+
"š",
|
1715 |
+
"ž",
|
1716 |
+
"f",
|
1717 |
+
"y",
|
1718 |
+
],
|
1719 |
+
"Slovak": [
|
1720 |
+
"o",
|
1721 |
+
"a",
|
1722 |
+
"e",
|
1723 |
+
"n",
|
1724 |
+
"i",
|
1725 |
+
"r",
|
1726 |
+
"v",
|
1727 |
+
"t",
|
1728 |
+
"s",
|
1729 |
+
"l",
|
1730 |
+
"k",
|
1731 |
+
"d",
|
1732 |
+
"m",
|
1733 |
+
"p",
|
1734 |
+
"u",
|
1735 |
+
"c",
|
1736 |
+
"h",
|
1737 |
+
"j",
|
1738 |
+
"b",
|
1739 |
+
"z",
|
1740 |
+
"á",
|
1741 |
+
"y",
|
1742 |
+
"ý",
|
1743 |
+
"í",
|
1744 |
+
"č",
|
1745 |
+
"é",
|
1746 |
+
],
|
1747 |
+
"Hebrew": [
|
1748 |
+
"י",
|
1749 |
+
"ו",
|
1750 |
+
"ה",
|
1751 |
+
"ל",
|
1752 |
+
"ר",
|
1753 |
+
"ב",
|
1754 |
+
"ת",
|
1755 |
+
"מ",
|
1756 |
+
"א",
|
1757 |
+
"ש",
|
1758 |
+
"נ",
|
1759 |
+
"ע",
|
1760 |
+
"ם",
|
1761 |
+
"ד",
|
1762 |
+
"ק",
|
1763 |
+
"ח",
|
1764 |
+
"פ",
|
1765 |
+
"ס",
|
1766 |
+
"כ",
|
1767 |
+
"ג",
|
1768 |
+
"ט",
|
1769 |
+
"צ",
|
1770 |
+
"ן",
|
1771 |
+
"ז",
|
1772 |
+
"ך",
|
1773 |
+
],
|
1774 |
+
"Bulgarian": [
|
1775 |
+
"а",
|
1776 |
+
"и",
|
1777 |
+
"о",
|
1778 |
+
"е",
|
1779 |
+
"н",
|
1780 |
+
"т",
|
1781 |
+
"р",
|
1782 |
+
"с",
|
1783 |
+
"в",
|
1784 |
+
"л",
|
1785 |
+
"к",
|
1786 |
+
"д",
|
1787 |
+
"п",
|
1788 |
+
"м",
|
1789 |
+
"з",
|
1790 |
+
"г",
|
1791 |
+
"я",
|
1792 |
+
"ъ",
|
1793 |
+
"у",
|
1794 |
+
"б",
|
1795 |
+
"ч",
|
1796 |
+
"ц",
|
1797 |
+
"й",
|
1798 |
+
"ж",
|
1799 |
+
"щ",
|
1800 |
+
"х",
|
1801 |
+
],
|
1802 |
+
"Croatian": [
|
1803 |
+
"a",
|
1804 |
+
"i",
|
1805 |
+
"o",
|
1806 |
+
"e",
|
1807 |
+
"n",
|
1808 |
+
"r",
|
1809 |
+
"j",
|
1810 |
+
"s",
|
1811 |
+
"t",
|
1812 |
+
"u",
|
1813 |
+
"k",
|
1814 |
+
"l",
|
1815 |
+
"v",
|
1816 |
+
"d",
|
1817 |
+
"m",
|
1818 |
+
"p",
|
1819 |
+
"g",
|
1820 |
+
"z",
|
1821 |
+
"b",
|
1822 |
+
"c",
|
1823 |
+
"č",
|
1824 |
+
"h",
|
1825 |
+
"š",
|
1826 |
+
"ž",
|
1827 |
+
"ć",
|
1828 |
+
"f",
|
1829 |
+
],
|
1830 |
+
"Hindi": [
|
1831 |
+
"क",
|
1832 |
+
"र",
|
1833 |
+
"स",
|
1834 |
+
"न",
|
1835 |
+
"त",
|
1836 |
+
"म",
|
1837 |
+
"ह",
|
1838 |
+
"प",
|
1839 |
+
"य",
|
1840 |
+
"ल",
|
1841 |
+
"व",
|
1842 |
+
"ज",
|
1843 |
+
"द",
|
1844 |
+
"ग",
|
1845 |
+
"ब",
|
1846 |
+
"श",
|
1847 |
+
"ट",
|
1848 |
+
"अ",
|
1849 |
+
"ए",
|
1850 |
+
"थ",
|
1851 |
+
"भ",
|
1852 |
+
"ड",
|
1853 |
+
"च",
|
1854 |
+
"ध",
|
1855 |
+
"ष",
|
1856 |
+
"इ",
|
1857 |
+
],
|
1858 |
+
"Estonian": [
|
1859 |
+
"a",
|
1860 |
+
"i",
|
1861 |
+
"e",
|
1862 |
+
"s",
|
1863 |
+
"t",
|
1864 |
+
"l",
|
1865 |
+
"u",
|
1866 |
+
"n",
|
1867 |
+
"o",
|
1868 |
+
"k",
|
1869 |
+
"r",
|
1870 |
+
"d",
|
1871 |
+
"m",
|
1872 |
+
"v",
|
1873 |
+
"g",
|
1874 |
+
"p",
|
1875 |
+
"j",
|
1876 |
+
"h",
|
1877 |
+
"ä",
|
1878 |
+
"b",
|
1879 |
+
"õ",
|
1880 |
+
"ü",
|
1881 |
+
"f",
|
1882 |
+
"c",
|
1883 |
+
"ö",
|
1884 |
+
"y",
|
1885 |
+
],
|
1886 |
+
"Thai": [
|
1887 |
+
"า",
|
1888 |
+
"น",
|
1889 |
+
"ร",
|
1890 |
+
"อ",
|
1891 |
+
"ก",
|
1892 |
+
"เ",
|
1893 |
+
"ง",
|
1894 |
+
"ม",
|
1895 |
+
"ย",
|
1896 |
+
"ล",
|
1897 |
+
"ว",
|
1898 |
+
"ด",
|
1899 |
+
"ท",
|
1900 |
+
"ส",
|
1901 |
+
"ต",
|
1902 |
+
"ะ",
|
1903 |
+
"ป",
|
1904 |
+
"บ",
|
1905 |
+
"ค",
|
1906 |
+
"ห",
|
1907 |
+
"แ",
|
1908 |
+
"จ",
|
1909 |
+
"พ",
|
1910 |
+
"ช",
|
1911 |
+
"ข",
|
1912 |
+
"ใ",
|
1913 |
+
],
|
1914 |
+
"Greek": [
|
1915 |
+
"α",
|
1916 |
+
"τ",
|
1917 |
+
"ο",
|
1918 |
+
"ι",
|
1919 |
+
"ε",
|
1920 |
+
"ν",
|
1921 |
+
"ρ",
|
1922 |
+
"σ",
|
1923 |
+
"κ",
|
1924 |
+
"η",
|
1925 |
+
"π",
|
1926 |
+
"ς",
|
1927 |
+
"υ",
|
1928 |
+
"μ",
|
1929 |
+
"λ",
|
1930 |
+
"ί",
|
1931 |
+
"ό",
|
1932 |
+
"ά",
|
1933 |
+
"γ",
|
1934 |
+
"έ",
|
1935 |
+
"δ",
|
1936 |
+
"ή",
|
1937 |
+
"ω",
|
1938 |
+
"χ",
|
1939 |
+
"θ",
|
1940 |
+
"ύ",
|
1941 |
+
],
|
1942 |
+
"Tamil": [
|
1943 |
+
"க",
|
1944 |
+
"த",
|
1945 |
+
"ப",
|
1946 |
+
"ட",
|
1947 |
+
"ர",
|
1948 |
+
"ம",
|
1949 |
+
"ல",
|
1950 |
+
"ன",
|
1951 |
+
"வ",
|
1952 |
+
"ற",
|
1953 |
+
"ய",
|
1954 |
+
"ள",
|
1955 |
+
"ச",
|
1956 |
+
"ந",
|
1957 |
+
"இ",
|
1958 |
+
"ண",
|
1959 |
+
"அ",
|
1960 |
+
"ஆ",
|
1961 |
+
"ழ",
|
1962 |
+
"ங",
|
1963 |
+
"எ",
|
1964 |
+
"உ",
|
1965 |
+
"ஒ",
|
1966 |
+
"ஸ",
|
1967 |
+
],
|
1968 |
+
"Kazakh": [
|
1969 |
+
"а",
|
1970 |
+
"ы",
|
1971 |
+
"е",
|
1972 |
+
"н",
|
1973 |
+
"т",
|
1974 |
+
"р",
|
1975 |
+
"л",
|
1976 |
+
"і",
|
1977 |
+
"д",
|
1978 |
+
"с",
|
1979 |
+
"м",
|
1980 |
+
"қ",
|
1981 |
+
"к",
|
1982 |
+
"о",
|
1983 |
+
"б",
|
1984 |
+
"и",
|
1985 |
+
"у",
|
1986 |
+
"ғ",
|
1987 |
+
"ж",
|
1988 |
+
"ң",
|
1989 |
+
"з",
|
1990 |
+
"ш",
|
1991 |
+
"й",
|
1992 |
+
"п",
|
1993 |
+
"г",
|
1994 |
+
"ө",
|
1995 |
+
],
|
1996 |
+
}
|
1997 |
+
|
1998 |
+
LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES)
|
meow/lib/python3.13/site-packages/charset_normalizer/md.cpython-313-darwin.so
ADDED
Binary file (116 kB). View file
|
|
meow/lib/python3.13/site-packages/charset_normalizer/md__mypyc.cpython-313-darwin.so
ADDED
Binary file (482 kB). View file
|
|
meow/lib/python3.13/site-packages/charset_normalizer/py.typed
ADDED
File without changes
|
meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
pip
|
meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/METADATA
ADDED
@@ -0,0 +1,279 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Metadata-Version: 2.4
|
2 |
+
Name: fsspec
|
3 |
+
Version: 2024.12.0
|
4 |
+
Summary: File-system specification
|
5 |
+
Project-URL: Changelog, https://filesystem-spec.readthedocs.io/en/latest/changelog.html
|
6 |
+
Project-URL: Documentation, https://filesystem-spec.readthedocs.io/en/latest/
|
7 |
+
Project-URL: Homepage, https://github.com/fsspec/filesystem_spec
|
8 |
+
Maintainer-email: Martin Durant <[email protected]>
|
9 |
+
License: BSD 3-Clause License
|
10 |
+
|
11 |
+
Copyright (c) 2018, Martin Durant
|
12 |
+
All rights reserved.
|
13 |
+
|
14 |
+
Redistribution and use in source and binary forms, with or without
|
15 |
+
modification, are permitted provided that the following conditions are met:
|
16 |
+
|
17 |
+
* Redistributions of source code must retain the above copyright notice, this
|
18 |
+
list of conditions and the following disclaimer.
|
19 |
+
|
20 |
+
* Redistributions in binary form must reproduce the above copyright notice,
|
21 |
+
this list of conditions and the following disclaimer in the documentation
|
22 |
+
and/or other materials provided with the distribution.
|
23 |
+
|
24 |
+
* Neither the name of the copyright holder nor the names of its
|
25 |
+
contributors may be used to endorse or promote products derived from
|
26 |
+
this software without specific prior written permission.
|
27 |
+
|
28 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
29 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
30 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
31 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
32 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
33 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
34 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
35 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
36 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
37 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
38 |
+
License-File: LICENSE
|
39 |
+
Keywords: file
|
40 |
+
Classifier: Development Status :: 4 - Beta
|
41 |
+
Classifier: Intended Audience :: Developers
|
42 |
+
Classifier: License :: OSI Approved :: BSD License
|
43 |
+
Classifier: Operating System :: OS Independent
|
44 |
+
Classifier: Programming Language :: Python :: 3.8
|
45 |
+
Classifier: Programming Language :: Python :: 3.9
|
46 |
+
Classifier: Programming Language :: Python :: 3.10
|
47 |
+
Classifier: Programming Language :: Python :: 3.11
|
48 |
+
Classifier: Programming Language :: Python :: 3.12
|
49 |
+
Classifier: Programming Language :: Python :: 3.13
|
50 |
+
Requires-Python: >=3.8
|
51 |
+
Provides-Extra: abfs
|
52 |
+
Requires-Dist: adlfs; extra == 'abfs'
|
53 |
+
Provides-Extra: adl
|
54 |
+
Requires-Dist: adlfs; extra == 'adl'
|
55 |
+
Provides-Extra: arrow
|
56 |
+
Requires-Dist: pyarrow>=1; extra == 'arrow'
|
57 |
+
Provides-Extra: dask
|
58 |
+
Requires-Dist: dask; extra == 'dask'
|
59 |
+
Requires-Dist: distributed; extra == 'dask'
|
60 |
+
Provides-Extra: dev
|
61 |
+
Requires-Dist: pre-commit; extra == 'dev'
|
62 |
+
Requires-Dist: ruff; extra == 'dev'
|
63 |
+
Provides-Extra: doc
|
64 |
+
Requires-Dist: numpydoc; extra == 'doc'
|
65 |
+
Requires-Dist: sphinx; extra == 'doc'
|
66 |
+
Requires-Dist: sphinx-design; extra == 'doc'
|
67 |
+
Requires-Dist: sphinx-rtd-theme; extra == 'doc'
|
68 |
+
Requires-Dist: yarl; extra == 'doc'
|
69 |
+
Provides-Extra: dropbox
|
70 |
+
Requires-Dist: dropbox; extra == 'dropbox'
|
71 |
+
Requires-Dist: dropboxdrivefs; extra == 'dropbox'
|
72 |
+
Requires-Dist: requests; extra == 'dropbox'
|
73 |
+
Provides-Extra: entrypoints
|
74 |
+
Provides-Extra: full
|
75 |
+
Requires-Dist: adlfs; extra == 'full'
|
76 |
+
Requires-Dist: aiohttp!=4.0.0a0,!=4.0.0a1; extra == 'full'
|
77 |
+
Requires-Dist: dask; extra == 'full'
|
78 |
+
Requires-Dist: distributed; extra == 'full'
|
79 |
+
Requires-Dist: dropbox; extra == 'full'
|
80 |
+
Requires-Dist: dropboxdrivefs; extra == 'full'
|
81 |
+
Requires-Dist: fusepy; extra == 'full'
|
82 |
+
Requires-Dist: gcsfs; extra == 'full'
|
83 |
+
Requires-Dist: libarchive-c; extra == 'full'
|
84 |
+
Requires-Dist: ocifs; extra == 'full'
|
85 |
+
Requires-Dist: panel; extra == 'full'
|
86 |
+
Requires-Dist: paramiko; extra == 'full'
|
87 |
+
Requires-Dist: pyarrow>=1; extra == 'full'
|
88 |
+
Requires-Dist: pygit2; extra == 'full'
|
89 |
+
Requires-Dist: requests; extra == 'full'
|
90 |
+
Requires-Dist: s3fs; extra == 'full'
|
91 |
+
Requires-Dist: smbprotocol; extra == 'full'
|
92 |
+
Requires-Dist: tqdm; extra == 'full'
|
93 |
+
Provides-Extra: fuse
|
94 |
+
Requires-Dist: fusepy; extra == 'fuse'
|
95 |
+
Provides-Extra: gcs
|
96 |
+
Requires-Dist: gcsfs; extra == 'gcs'
|
97 |
+
Provides-Extra: git
|
98 |
+
Requires-Dist: pygit2; extra == 'git'
|
99 |
+
Provides-Extra: github
|
100 |
+
Requires-Dist: requests; extra == 'github'
|
101 |
+
Provides-Extra: gs
|
102 |
+
Requires-Dist: gcsfs; extra == 'gs'
|
103 |
+
Provides-Extra: gui
|
104 |
+
Requires-Dist: panel; extra == 'gui'
|
105 |
+
Provides-Extra: hdfs
|
106 |
+
Requires-Dist: pyarrow>=1; extra == 'hdfs'
|
107 |
+
Provides-Extra: http
|
108 |
+
Requires-Dist: aiohttp!=4.0.0a0,!=4.0.0a1; extra == 'http'
|
109 |
+
Provides-Extra: libarchive
|
110 |
+
Requires-Dist: libarchive-c; extra == 'libarchive'
|
111 |
+
Provides-Extra: oci
|
112 |
+
Requires-Dist: ocifs; extra == 'oci'
|
113 |
+
Provides-Extra: s3
|
114 |
+
Requires-Dist: s3fs; extra == 's3'
|
115 |
+
Provides-Extra: sftp
|
116 |
+
Requires-Dist: paramiko; extra == 'sftp'
|
117 |
+
Provides-Extra: smb
|
118 |
+
Requires-Dist: smbprotocol; extra == 'smb'
|
119 |
+
Provides-Extra: ssh
|
120 |
+
Requires-Dist: paramiko; extra == 'ssh'
|
121 |
+
Provides-Extra: test
|
122 |
+
Requires-Dist: aiohttp!=4.0.0a0,!=4.0.0a1; extra == 'test'
|
123 |
+
Requires-Dist: numpy; extra == 'test'
|
124 |
+
Requires-Dist: pytest; extra == 'test'
|
125 |
+
Requires-Dist: pytest-asyncio!=0.22.0; extra == 'test'
|
126 |
+
Requires-Dist: pytest-benchmark; extra == 'test'
|
127 |
+
Requires-Dist: pytest-cov; extra == 'test'
|
128 |
+
Requires-Dist: pytest-mock; extra == 'test'
|
129 |
+
Requires-Dist: pytest-recording; extra == 'test'
|
130 |
+
Requires-Dist: pytest-rerunfailures; extra == 'test'
|
131 |
+
Requires-Dist: requests; extra == 'test'
|
132 |
+
Provides-Extra: test-downstream
|
133 |
+
Requires-Dist: aiobotocore<3.0.0,>=2.5.4; extra == 'test-downstream'
|
134 |
+
Requires-Dist: dask-expr; extra == 'test-downstream'
|
135 |
+
Requires-Dist: dask[dataframe,test]; extra == 'test-downstream'
|
136 |
+
Requires-Dist: moto[server]<5,>4; extra == 'test-downstream'
|
137 |
+
Requires-Dist: pytest-timeout; extra == 'test-downstream'
|
138 |
+
Requires-Dist: xarray; extra == 'test-downstream'
|
139 |
+
Provides-Extra: test-full
|
140 |
+
Requires-Dist: adlfs; extra == 'test-full'
|
141 |
+
Requires-Dist: aiohttp!=4.0.0a0,!=4.0.0a1; extra == 'test-full'
|
142 |
+
Requires-Dist: cloudpickle; extra == 'test-full'
|
143 |
+
Requires-Dist: dask; extra == 'test-full'
|
144 |
+
Requires-Dist: distributed; extra == 'test-full'
|
145 |
+
Requires-Dist: dropbox; extra == 'test-full'
|
146 |
+
Requires-Dist: dropboxdrivefs; extra == 'test-full'
|
147 |
+
Requires-Dist: fastparquet; extra == 'test-full'
|
148 |
+
Requires-Dist: fusepy; extra == 'test-full'
|
149 |
+
Requires-Dist: gcsfs; extra == 'test-full'
|
150 |
+
Requires-Dist: jinja2; extra == 'test-full'
|
151 |
+
Requires-Dist: kerchunk; extra == 'test-full'
|
152 |
+
Requires-Dist: libarchive-c; extra == 'test-full'
|
153 |
+
Requires-Dist: lz4; extra == 'test-full'
|
154 |
+
Requires-Dist: notebook; extra == 'test-full'
|
155 |
+
Requires-Dist: numpy; extra == 'test-full'
|
156 |
+
Requires-Dist: ocifs; extra == 'test-full'
|
157 |
+
Requires-Dist: pandas; extra == 'test-full'
|
158 |
+
Requires-Dist: panel; extra == 'test-full'
|
159 |
+
Requires-Dist: paramiko; extra == 'test-full'
|
160 |
+
Requires-Dist: pyarrow; extra == 'test-full'
|
161 |
+
Requires-Dist: pyarrow>=1; extra == 'test-full'
|
162 |
+
Requires-Dist: pyftpdlib; extra == 'test-full'
|
163 |
+
Requires-Dist: pygit2; extra == 'test-full'
|
164 |
+
Requires-Dist: pytest; extra == 'test-full'
|
165 |
+
Requires-Dist: pytest-asyncio!=0.22.0; extra == 'test-full'
|
166 |
+
Requires-Dist: pytest-benchmark; extra == 'test-full'
|
167 |
+
Requires-Dist: pytest-cov; extra == 'test-full'
|
168 |
+
Requires-Dist: pytest-mock; extra == 'test-full'
|
169 |
+
Requires-Dist: pytest-recording; extra == 'test-full'
|
170 |
+
Requires-Dist: pytest-rerunfailures; extra == 'test-full'
|
171 |
+
Requires-Dist: python-snappy; extra == 'test-full'
|
172 |
+
Requires-Dist: requests; extra == 'test-full'
|
173 |
+
Requires-Dist: smbprotocol; extra == 'test-full'
|
174 |
+
Requires-Dist: tqdm; extra == 'test-full'
|
175 |
+
Requires-Dist: urllib3; extra == 'test-full'
|
176 |
+
Requires-Dist: zarr; extra == 'test-full'
|
177 |
+
Requires-Dist: zstandard; extra == 'test-full'
|
178 |
+
Provides-Extra: tqdm
|
179 |
+
Requires-Dist: tqdm; extra == 'tqdm'
|
180 |
+
Description-Content-Type: text/markdown
|
181 |
+
|
182 |
+
# filesystem_spec
|
183 |
+
|
184 |
+
[![PyPI version](https://badge.fury.io/py/fsspec.svg)](https://pypi.python.org/pypi/fsspec/)
|
185 |
+
[![Anaconda-Server Badge](https://anaconda.org/conda-forge/fsspec/badges/version.svg)](https://anaconda.org/conda-forge/fsspec)
|
186 |
+
![Build](https://github.com/fsspec/filesystem_spec/workflows/CI/badge.svg)
|
187 |
+
[![Docs](https://readthedocs.org/projects/filesystem-spec/badge/?version=latest)](https://filesystem-spec.readthedocs.io/en/latest/?badge=latest)
|
188 |
+
|
189 |
+
A specification for pythonic filesystems.
|
190 |
+
|
191 |
+
## Install
|
192 |
+
|
193 |
+
```bash
|
194 |
+
pip install fsspec
|
195 |
+
```
|
196 |
+
|
197 |
+
would install the base fsspec. Various optionally supported features might require specification of custom
|
198 |
+
extra require, e.g. `pip install fsspec[ssh]` will install dependencies for `ssh` backends support.
|
199 |
+
Use `pip install fsspec[full]` for installation of all known extra dependencies.
|
200 |
+
|
201 |
+
Up-to-date package also provided through conda-forge distribution:
|
202 |
+
|
203 |
+
```bash
|
204 |
+
conda install -c conda-forge fsspec
|
205 |
+
```
|
206 |
+
|
207 |
+
|
208 |
+
## Purpose
|
209 |
+
|
210 |
+
To produce a template or specification for a file-system interface, that specific implementations should follow,
|
211 |
+
so that applications making use of them can rely on a common behaviour and not have to worry about the specific
|
212 |
+
internal implementation decisions with any given backend. Many such implementations are included in this package,
|
213 |
+
or in sister projects such as `s3fs` and `gcsfs`.
|
214 |
+
|
215 |
+
In addition, if this is well-designed, then additional functionality, such as a key-value store or FUSE
|
216 |
+
mounting of the file-system implementation may be available for all implementations "for free".
|
217 |
+
|
218 |
+
## Documentation
|
219 |
+
|
220 |
+
Please refer to [RTD](https://filesystem-spec.readthedocs.io/en/latest/?badge=latest)
|
221 |
+
|
222 |
+
## Develop
|
223 |
+
|
224 |
+
fsspec uses GitHub Actions for CI. Environment files can be found
|
225 |
+
in the "ci/" directory. Note that the main environment is called "py38",
|
226 |
+
but it is expected that the version of python installed be adjustable at
|
227 |
+
CI runtime. For local use, pick a version suitable for you.
|
228 |
+
|
229 |
+
```bash
|
230 |
+
# For a new environment (mamba / conda).
|
231 |
+
mamba create -n fsspec -c conda-forge python=3.9 -y
|
232 |
+
conda activate fsspec
|
233 |
+
|
234 |
+
# Standard dev install with docs and tests.
|
235 |
+
pip install -e ".[dev,doc,test]"
|
236 |
+
|
237 |
+
# Full tests except for downstream
|
238 |
+
pip install s3fs
|
239 |
+
pip uninstall s3fs
|
240 |
+
pip install -e .[dev,doc,test_full]
|
241 |
+
pip install s3fs --no-deps
|
242 |
+
pytest -v
|
243 |
+
|
244 |
+
# Downstream tests.
|
245 |
+
sh install_s3fs.sh
|
246 |
+
# Windows powershell.
|
247 |
+
install_s3fs.sh
|
248 |
+
```
|
249 |
+
|
250 |
+
### Testing
|
251 |
+
|
252 |
+
Tests can be run in the dev environment, if activated, via ``pytest fsspec``.
|
253 |
+
|
254 |
+
The full fsspec suite requires a system-level docker, docker-compose, and fuse
|
255 |
+
installation. If only making changes to one backend implementation, it is
|
256 |
+
not generally necessary to run all tests locally.
|
257 |
+
|
258 |
+
It is expected that contributors ensure that any change to fsspec does not
|
259 |
+
cause issues or regressions for either other fsspec-related packages such
|
260 |
+
as gcsfs and s3fs, nor for downstream users of fsspec. The "downstream" CI
|
261 |
+
run and corresponding environment file run a set of tests from the dask
|
262 |
+
test suite, and very minimal tests against pandas and zarr from the
|
263 |
+
test_downstream.py module in this repo.
|
264 |
+
|
265 |
+
### Code Formatting
|
266 |
+
|
267 |
+
fsspec uses [Black](https://black.readthedocs.io/en/stable) to ensure
|
268 |
+
a consistent code format throughout the project.
|
269 |
+
Run ``black fsspec`` from the root of the filesystem_spec repository to
|
270 |
+
auto-format your code. Additionally, many editors have plugins that will apply
|
271 |
+
``black`` as you edit files. ``black`` is included in the ``tox`` environments.
|
272 |
+
|
273 |
+
Optionally, you may wish to setup [pre-commit hooks](https://pre-commit.com) to
|
274 |
+
automatically run ``black`` when you make a git commit.
|
275 |
+
Run ``pre-commit install --install-hooks`` from the root of the
|
276 |
+
filesystem_spec repository to setup pre-commit hooks. ``black`` will now be run
|
277 |
+
before you commit, reformatting any changed files. You can format without
|
278 |
+
committing via ``pre-commit run`` or skip these checks with ``git commit
|
279 |
+
--no-verify``.
|
meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/RECORD
ADDED
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
fsspec-2024.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
2 |
+
fsspec-2024.12.0.dist-info/METADATA,sha256=bZRSxMVT7oHOUheJQEDMsChBFuby1pTg8WUbnLYATrc,11801
|
3 |
+
fsspec-2024.12.0.dist-info/RECORD,,
|
4 |
+
fsspec-2024.12.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
5 |
+
fsspec-2024.12.0.dist-info/licenses/LICENSE,sha256=LcNUls5TpzB5FcAIqESq1T53K0mzTN0ARFBnaRQH7JQ,1513
|
6 |
+
fsspec/__init__.py,sha256=l9MJaNNV2d4wKpCtMvXDr55n92DkdrAayGy3F9ICjzk,1998
|
7 |
+
fsspec/__pycache__/__init__.cpython-313.pyc,,
|
8 |
+
fsspec/__pycache__/_version.cpython-313.pyc,,
|
9 |
+
fsspec/__pycache__/archive.cpython-313.pyc,,
|
10 |
+
fsspec/__pycache__/asyn.cpython-313.pyc,,
|
11 |
+
fsspec/__pycache__/caching.cpython-313.pyc,,
|
12 |
+
fsspec/__pycache__/callbacks.cpython-313.pyc,,
|
13 |
+
fsspec/__pycache__/compression.cpython-313.pyc,,
|
14 |
+
fsspec/__pycache__/config.cpython-313.pyc,,
|
15 |
+
fsspec/__pycache__/conftest.cpython-313.pyc,,
|
16 |
+
fsspec/__pycache__/core.cpython-313.pyc,,
|
17 |
+
fsspec/__pycache__/dircache.cpython-313.pyc,,
|
18 |
+
fsspec/__pycache__/exceptions.cpython-313.pyc,,
|
19 |
+
fsspec/__pycache__/fuse.cpython-313.pyc,,
|
20 |
+
fsspec/__pycache__/generic.cpython-313.pyc,,
|
21 |
+
fsspec/__pycache__/gui.cpython-313.pyc,,
|
22 |
+
fsspec/__pycache__/json.cpython-313.pyc,,
|
23 |
+
fsspec/__pycache__/mapping.cpython-313.pyc,,
|
24 |
+
fsspec/__pycache__/parquet.cpython-313.pyc,,
|
25 |
+
fsspec/__pycache__/registry.cpython-313.pyc,,
|
26 |
+
fsspec/__pycache__/spec.cpython-313.pyc,,
|
27 |
+
fsspec/__pycache__/transaction.cpython-313.pyc,,
|
28 |
+
fsspec/__pycache__/utils.cpython-313.pyc,,
|
29 |
+
fsspec/_version.py,sha256=wQ2VhCCZZrkDgAic2RGrr4PbmFQdFL4PTIWVnK3r9tM,419
|
30 |
+
fsspec/archive.py,sha256=S__DzfZj-urAN3tp2W6jJ6YDiXG1fAl7FjvWUN73qIE,2386
|
31 |
+
fsspec/asyn.py,sha256=rsnCsFUmBZmKJqg9m-IDWInoQtE4wV0rGDZEXZwuU3c,36500
|
32 |
+
fsspec/caching.py,sha256=oHVy9zpy4Oqk5f1t3-Q31bbw0tsmfddGGKLJs__OdKA,32790
|
33 |
+
fsspec/callbacks.py,sha256=BDIwLzK6rr_0V5ch557fSzsivCElpdqhXr5dZ9Te-EE,9210
|
34 |
+
fsspec/compression.py,sha256=jCSUMJu-zSNyrusnHT0wKXgOd1tTJR6vM126i5SR5Zc,4865
|
35 |
+
fsspec/config.py,sha256=LF4Zmu1vhJW7Je9Q-cwkRc3xP7Rhyy7Xnwj26Z6sv2g,4279
|
36 |
+
fsspec/conftest.py,sha256=fVfx-NLrH_OZS1TIpYNoPzM7efEcMoL62reHOdYeFCA,1245
|
37 |
+
fsspec/core.py,sha256=bn-y3Mn9q8Gh3Ng_yAIDfIjyysQ95tuK78RlhlrqTb4,23828
|
38 |
+
fsspec/dircache.py,sha256=YzogWJrhEastHU7vWz-cJiJ7sdtLXFXhEpInGKd4EcM,2717
|
39 |
+
fsspec/exceptions.py,sha256=pauSLDMxzTJMOjvX1WEUK0cMyFkrFxpWJsyFywav7A8,331
|
40 |
+
fsspec/fuse.py,sha256=Q-3NOOyLqBfYa4Db5E19z_ZY36zzYHtIs1mOUasItBQ,10177
|
41 |
+
fsspec/generic.py,sha256=AFbo-mHBt5QJV1Aplg5CJuUiiJ4bNQhcKRuwkZJdWac,13761
|
42 |
+
fsspec/gui.py,sha256=xBnHL2-r0LVwhDAtnHoPpXts7jd4Z32peawCJiI-7lI,13975
|
43 |
+
fsspec/implementations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
44 |
+
fsspec/implementations/__pycache__/__init__.cpython-313.pyc,,
|
45 |
+
fsspec/implementations/__pycache__/arrow.cpython-313.pyc,,
|
46 |
+
fsspec/implementations/__pycache__/asyn_wrapper.cpython-313.pyc,,
|
47 |
+
fsspec/implementations/__pycache__/cache_mapper.cpython-313.pyc,,
|
48 |
+
fsspec/implementations/__pycache__/cache_metadata.cpython-313.pyc,,
|
49 |
+
fsspec/implementations/__pycache__/cached.cpython-313.pyc,,
|
50 |
+
fsspec/implementations/__pycache__/dask.cpython-313.pyc,,
|
51 |
+
fsspec/implementations/__pycache__/data.cpython-313.pyc,,
|
52 |
+
fsspec/implementations/__pycache__/dbfs.cpython-313.pyc,,
|
53 |
+
fsspec/implementations/__pycache__/dirfs.cpython-313.pyc,,
|
54 |
+
fsspec/implementations/__pycache__/ftp.cpython-313.pyc,,
|
55 |
+
fsspec/implementations/__pycache__/git.cpython-313.pyc,,
|
56 |
+
fsspec/implementations/__pycache__/github.cpython-313.pyc,,
|
57 |
+
fsspec/implementations/__pycache__/http.cpython-313.pyc,,
|
58 |
+
fsspec/implementations/__pycache__/jupyter.cpython-313.pyc,,
|
59 |
+
fsspec/implementations/__pycache__/libarchive.cpython-313.pyc,,
|
60 |
+
fsspec/implementations/__pycache__/local.cpython-313.pyc,,
|
61 |
+
fsspec/implementations/__pycache__/memory.cpython-313.pyc,,
|
62 |
+
fsspec/implementations/__pycache__/reference.cpython-313.pyc,,
|
63 |
+
fsspec/implementations/__pycache__/sftp.cpython-313.pyc,,
|
64 |
+
fsspec/implementations/__pycache__/smb.cpython-313.pyc,,
|
65 |
+
fsspec/implementations/__pycache__/tar.cpython-313.pyc,,
|
66 |
+
fsspec/implementations/__pycache__/webhdfs.cpython-313.pyc,,
|
67 |
+
fsspec/implementations/__pycache__/zip.cpython-313.pyc,,
|
68 |
+
fsspec/implementations/arrow.py,sha256=721Dikne_lV_0tlgk9jyKmHL6W-5MT0h2LKGvOYQTPI,8623
|
69 |
+
fsspec/implementations/asyn_wrapper.py,sha256=cXfSkF2AaboInIIA_6jmB796RP_BXd8u08loPAHQsxQ,2864
|
70 |
+
fsspec/implementations/cache_mapper.py,sha256=W4wlxyPxZbSp9ItJ0pYRVBMh6bw9eFypgP6kUYuuiI4,2421
|
71 |
+
fsspec/implementations/cache_metadata.py,sha256=pcOJYcBQY5OaC7Yhw0F3wjg08QLYApGmoISCrbs59ks,8511
|
72 |
+
fsspec/implementations/cached.py,sha256=KA6c4jqrGeeg8WNPLsh8FkL3KeRAQtGLzKw18vSF1CI,32820
|
73 |
+
fsspec/implementations/dask.py,sha256=CXZbJzIVOhKV8ILcxuy3bTvcacCueAbyQxmvAkbPkrk,4466
|
74 |
+
fsspec/implementations/data.py,sha256=LDLczxRh8h7x39Zjrd-GgzdQHr78yYxDlrv2C9Uxb5E,1658
|
75 |
+
fsspec/implementations/dbfs.py,sha256=a0eNjLxyfFK7pbEa52U8K-PhNHukzdGVx1eLcVniaXY,15092
|
76 |
+
fsspec/implementations/dirfs.py,sha256=ymakitNNQ07tW76EShyw3rC9RvIDHl4gtuOhE_h1vUg,12032
|
77 |
+
fsspec/implementations/ftp.py,sha256=sorsczLp_2J3ukONsbZY-11sRZP6H5a3V7XXf6o6ip0,11936
|
78 |
+
fsspec/implementations/git.py,sha256=4SElW9U5d3k3_ITlvUAx59Yk7XLNRTqkGa2C3hCUkWM,3754
|
79 |
+
fsspec/implementations/github.py,sha256=eAn1kJ7VeWR6gVoVRLBYclF_rQDXSJU-xzMXpvPQWqs,8002
|
80 |
+
fsspec/implementations/http.py,sha256=d7G7_pRTMHouKE42lvRNHqB5u4XQi0dm4wb-6U_IiF4,29361
|
81 |
+
fsspec/implementations/jupyter.py,sha256=B2uj7OEm7yIk-vRSsO37_ND0t0EBvn4B-Su43ibN4Pg,3811
|
82 |
+
fsspec/implementations/libarchive.py,sha256=5_I2DiLXwQ1JC8x-K7jXu-tBwhO9dj7tFLnb0bTnVMQ,7102
|
83 |
+
fsspec/implementations/local.py,sha256=YvR9b2MndSQIHszAMUkFvN65eWVbIfoGJJjAeS43ZS4,15259
|
84 |
+
fsspec/implementations/memory.py,sha256=cLNrK9wk97sl4Tre9uVDXWj6mEHvvVVIgaVgNA5KVIg,10527
|
85 |
+
fsspec/implementations/reference.py,sha256=E-XYtnsHpE1e4x2io1ILOioGsWpCLDH1bqYN3QPGUJI,45930
|
86 |
+
fsspec/implementations/sftp.py,sha256=fMY9XZcmpjszQ2tCqO_TPaJesaeD_Dv7ptYzgUPGoO0,5631
|
87 |
+
fsspec/implementations/smb.py,sha256=5fhu8h06nOLBPh2c48aT7WBRqh9cEcbIwtyu06wTjec,15236
|
88 |
+
fsspec/implementations/tar.py,sha256=dam78Tp_CozybNqCY2JYgGBS3Uc9FuJUAT9oB0lolOs,4111
|
89 |
+
fsspec/implementations/webhdfs.py,sha256=G9wGywj7BkZk4Mu9zXu6HaDlEqX4F8Gw1i4k46CP_-o,16769
|
90 |
+
fsspec/implementations/zip.py,sha256=9LBMHPft2OutJl2Ft-r9u_z3GptLkc2n91ur2A3bCbg,6072
|
91 |
+
fsspec/json.py,sha256=65sQ0Y7mTj33u_Y4IId5up4abQ3bAel4E4QzbKMiQSg,3826
|
92 |
+
fsspec/mapping.py,sha256=m2ndB_gtRBXYmNJg0Ie1-BVR75TFleHmIQBzC-yWhjU,8343
|
93 |
+
fsspec/parquet.py,sha256=6ibAmG527L5JNFS0VO8BDNlxHdA3bVYqdByeiFgpUVM,19448
|
94 |
+
fsspec/registry.py,sha256=A2r3PiZd17192sGHLwWNFbK8RFiDA7gSbfboIJ07wTY,11471
|
95 |
+
fsspec/spec.py,sha256=d_NY5YVuwV7YCRduKkaR_z8B9GUna4-H9mOinymEMFY,75971
|
96 |
+
fsspec/tests/abstract/__init__.py,sha256=4xUJrv7gDgc85xAOz1p-V_K1hrsdMWTSa0rviALlJk8,10181
|
97 |
+
fsspec/tests/abstract/__pycache__/__init__.cpython-313.pyc,,
|
98 |
+
fsspec/tests/abstract/__pycache__/common.cpython-313.pyc,,
|
99 |
+
fsspec/tests/abstract/__pycache__/copy.cpython-313.pyc,,
|
100 |
+
fsspec/tests/abstract/__pycache__/get.cpython-313.pyc,,
|
101 |
+
fsspec/tests/abstract/__pycache__/mv.cpython-313.pyc,,
|
102 |
+
fsspec/tests/abstract/__pycache__/open.cpython-313.pyc,,
|
103 |
+
fsspec/tests/abstract/__pycache__/pipe.cpython-313.pyc,,
|
104 |
+
fsspec/tests/abstract/__pycache__/put.cpython-313.pyc,,
|
105 |
+
fsspec/tests/abstract/common.py,sha256=1GQwNo5AONzAnzZj0fWgn8NJPLXALehbsuGxS3FzWVU,4973
|
106 |
+
fsspec/tests/abstract/copy.py,sha256=gU5-d97U3RSde35Vp4RxPY4rWwL744HiSrJ8IBOp9-8,19967
|
107 |
+
fsspec/tests/abstract/get.py,sha256=vNR4HztvTR7Cj56AMo7_tx7TeYz1Jgr_2Wb8Lv-UiBY,20755
|
108 |
+
fsspec/tests/abstract/mv.py,sha256=k8eUEBIrRrGMsBY5OOaDXdGnQUKGwDIfQyduB6YD3Ns,1982
|
109 |
+
fsspec/tests/abstract/open.py,sha256=Fi2PBPYLbRqysF8cFm0rwnB41kMdQVYjq8cGyDXp3BU,329
|
110 |
+
fsspec/tests/abstract/pipe.py,sha256=LFzIrLCB5GLXf9rzFKJmE8AdG7LQ_h4bJo70r8FLPqM,402
|
111 |
+
fsspec/tests/abstract/put.py,sha256=7aih17OKB_IZZh1Mkq1eBDIjobhtMQmI8x-Pw-S_aZk,21201
|
112 |
+
fsspec/transaction.py,sha256=xliRG6U2Zf3khG4xcw9WiB-yAoqJSHEGK_VjHOdtgo0,2398
|
113 |
+
fsspec/utils.py,sha256=A11t25RnpiQ30RO6xeR0Qqlu3fGj8bnc40jg08tlYSI,22980
|
meow/lib/python3.13/site-packages/fsspec-2024.12.0.dist-info/WHEEL
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Wheel-Version: 1.0
|
2 |
+
Generator: hatchling 1.27.0
|
3 |
+
Root-Is-Purelib: true
|
4 |
+
Tag: py3-none-any
|
meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/LICENSE
ADDED
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Apache License
|
2 |
+
Version 2.0, January 2004
|
3 |
+
http://www.apache.org/licenses/
|
4 |
+
|
5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
6 |
+
|
7 |
+
1. Definitions.
|
8 |
+
|
9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
11 |
+
|
12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
13 |
+
the copyright owner that is granting the License.
|
14 |
+
|
15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
16 |
+
other entities that control, are controlled by, or are under common
|
17 |
+
control with that entity. For the purposes of this definition,
|
18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
19 |
+
direction or management of such entity, whether by contract or
|
20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
22 |
+
|
23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
24 |
+
exercising permissions granted by this License.
|
25 |
+
|
26 |
+
"Source" form shall mean the preferred form for making modifications,
|
27 |
+
including but not limited to software source code, documentation
|
28 |
+
source, and configuration files.
|
29 |
+
|
30 |
+
"Object" form shall mean any form resulting from mechanical
|
31 |
+
transformation or translation of a Source form, including but
|
32 |
+
not limited to compiled object code, generated documentation,
|
33 |
+
and conversions to other media types.
|
34 |
+
|
35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
36 |
+
Object form, made available under the License, as indicated by a
|
37 |
+
copyright notice that is included in or attached to the work
|
38 |
+
(an example is provided in the Appendix below).
|
39 |
+
|
40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
41 |
+
form, that is based on (or derived from) the Work and for which the
|
42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
44 |
+
of this License, Derivative Works shall not include works that remain
|
45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
46 |
+
the Work and Derivative Works thereof.
|
47 |
+
|
48 |
+
"Contribution" shall mean any work of authorship, including
|
49 |
+
the original version of the Work and any modifications or additions
|
50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
54 |
+
means any form of electronic, verbal, or written communication sent
|
55 |
+
to the Licensor or its representatives, including but not limited to
|
56 |
+
communication on electronic mailing lists, source code control systems,
|
57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
59 |
+
excluding communication that is conspicuously marked or otherwise
|
60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
61 |
+
|
62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
64 |
+
subsequently incorporated within the Work.
|
65 |
+
|
66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
71 |
+
Work and such Derivative Works in Source or Object form.
|
72 |
+
|
73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
76 |
+
(except as stated in this section) patent license to make, have made,
|
77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
78 |
+
where such license applies only to those patent claims licensable
|
79 |
+
by such Contributor that are necessarily infringed by their
|
80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
82 |
+
institute patent litigation against any entity (including a
|
83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
84 |
+
or a Contribution incorporated within the Work constitutes direct
|
85 |
+
or contributory patent infringement, then any patent licenses
|
86 |
+
granted to You under this License for that Work shall terminate
|
87 |
+
as of the date such litigation is filed.
|
88 |
+
|
89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
90 |
+
Work or Derivative Works thereof in any medium, with or without
|
91 |
+
modifications, and in Source or Object form, provided that You
|
92 |
+
meet the following conditions:
|
93 |
+
|
94 |
+
(a) You must give any other recipients of the Work or
|
95 |
+
Derivative Works a copy of this License; and
|
96 |
+
|
97 |
+
(b) You must cause any modified files to carry prominent notices
|
98 |
+
stating that You changed the files; and
|
99 |
+
|
100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
101 |
+
that You distribute, all copyright, patent, trademark, and
|
102 |
+
attribution notices from the Source form of the Work,
|
103 |
+
excluding those notices that do not pertain to any part of
|
104 |
+
the Derivative Works; and
|
105 |
+
|
106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
107 |
+
distribution, then any Derivative Works that You distribute must
|
108 |
+
include a readable copy of the attribution notices contained
|
109 |
+
within such NOTICE file, excluding those notices that do not
|
110 |
+
pertain to any part of the Derivative Works, in at least one
|
111 |
+
of the following places: within a NOTICE text file distributed
|
112 |
+
as part of the Derivative Works; within the Source form or
|
113 |
+
documentation, if provided along with the Derivative Works; or,
|
114 |
+
within a display generated by the Derivative Works, if and
|
115 |
+
wherever such third-party notices normally appear. The contents
|
116 |
+
of the NOTICE file are for informational purposes only and
|
117 |
+
do not modify the License. You may add Your own attribution
|
118 |
+
notices within Derivative Works that You distribute, alongside
|
119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
120 |
+
that such additional attribution notices cannot be construed
|
121 |
+
as modifying the License.
|
122 |
+
|
123 |
+
You may add Your own copyright statement to Your modifications and
|
124 |
+
may provide additional or different license terms and conditions
|
125 |
+
for use, reproduction, or distribution of Your modifications, or
|
126 |
+
for any such Derivative Works as a whole, provided Your use,
|
127 |
+
reproduction, and distribution of the Work otherwise complies with
|
128 |
+
the conditions stated in this License.
|
129 |
+
|
130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
132 |
+
by You to the Licensor shall be under the terms and conditions of
|
133 |
+
this License, without any additional terms or conditions.
|
134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
135 |
+
the terms of any separate license agreement you may have executed
|
136 |
+
with Licensor regarding such Contributions.
|
137 |
+
|
138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
140 |
+
except as required for reasonable and customary use in describing the
|
141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
142 |
+
|
143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
144 |
+
agreed to in writing, Licensor provides the Work (and each
|
145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
147 |
+
implied, including, without limitation, any warranties or conditions
|
148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
150 |
+
appropriateness of using or redistributing the Work and assume any
|
151 |
+
risks associated with Your exercise of permissions under this License.
|
152 |
+
|
153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
154 |
+
whether in tort (including negligence), contract, or otherwise,
|
155 |
+
unless required by applicable law (such as deliberate and grossly
|
156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
157 |
+
liable to You for damages, including any direct, indirect, special,
|
158 |
+
incidental, or consequential damages of any character arising as a
|
159 |
+
result of this License or out of the use or inability to use the
|
160 |
+
Work (including but not limited to damages for loss of goodwill,
|
161 |
+
work stoppage, computer failure or malfunction, or any and all
|
162 |
+
other commercial damages or losses), even if such Contributor
|
163 |
+
has been advised of the possibility of such damages.
|
164 |
+
|
165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
168 |
+
or other liability obligations and/or rights consistent with this
|
169 |
+
License. However, in accepting such obligations, You may act only
|
170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
171 |
+
of any other Contributor, and only if You agree to indemnify,
|
172 |
+
defend, and hold each Contributor harmless for any liability
|
173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
174 |
+
of your accepting any such warranty or additional liability.
|
175 |
+
|
176 |
+
END OF TERMS AND CONDITIONS
|
177 |
+
|
178 |
+
APPENDIX: How to apply the Apache License to your work.
|
179 |
+
|
180 |
+
To apply the Apache License to your work, attach the following
|
181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
182 |
+
replaced with your own identifying information. (Don't include
|
183 |
+
the brackets!) The text should be enclosed in the appropriate
|
184 |
+
comment syntax for the file format. We also recommend that a
|
185 |
+
file or class name and description of purpose be included on the
|
186 |
+
same "printed page" as the copyright notice for easier
|
187 |
+
identification within third-party archives.
|
188 |
+
|
189 |
+
Copyright [yyyy] [name of copyright owner]
|
190 |
+
|
191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
192 |
+
you may not use this file except in compliance with the License.
|
193 |
+
You may obtain a copy of the License at
|
194 |
+
|
195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
196 |
+
|
197 |
+
Unless required by applicable law or agreed to in writing, software
|
198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
200 |
+
See the License for the specific language governing permissions and
|
201 |
+
limitations under the License.
|
meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/RECORD
ADDED
@@ -0,0 +1,229 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
../../../bin/huggingface-cli,sha256=AbjM4ROOB4Lf8gN6WjLTs8e9U2TfX_jxHyoVdqvwQ34,267
|
2 |
+
huggingface_hub-0.27.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
3 |
+
huggingface_hub-0.27.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
|
4 |
+
huggingface_hub-0.27.0.dist-info/METADATA,sha256=QLeeJfWH1pjVlAv3sW2YlH16XiOZQs0z-n0sFGoUzgQ,13480
|
5 |
+
huggingface_hub-0.27.0.dist-info/RECORD,,
|
6 |
+
huggingface_hub-0.27.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
7 |
+
huggingface_hub-0.27.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
|
8 |
+
huggingface_hub-0.27.0.dist-info/entry_points.txt,sha256=Y3Z2L02rBG7va_iE6RPXolIgwOdwUFONyRN3kXMxZ0g,131
|
9 |
+
huggingface_hub-0.27.0.dist-info/top_level.txt,sha256=8KzlQJAY4miUvjAssOAJodqKOw3harNzuiwGQ9qLSSk,16
|
10 |
+
huggingface_hub/__init__.py,sha256=GA_gQRSW4I3i09j-ANZ88MxMi3RYCMShYQEDTNMxQjs,37213
|
11 |
+
huggingface_hub/__pycache__/__init__.cpython-313.pyc,,
|
12 |
+
huggingface_hub/__pycache__/_commit_api.cpython-313.pyc,,
|
13 |
+
huggingface_hub/__pycache__/_commit_scheduler.cpython-313.pyc,,
|
14 |
+
huggingface_hub/__pycache__/_inference_endpoints.cpython-313.pyc,,
|
15 |
+
huggingface_hub/__pycache__/_local_folder.cpython-313.pyc,,
|
16 |
+
huggingface_hub/__pycache__/_login.cpython-313.pyc,,
|
17 |
+
huggingface_hub/__pycache__/_snapshot_download.cpython-313.pyc,,
|
18 |
+
huggingface_hub/__pycache__/_space_api.cpython-313.pyc,,
|
19 |
+
huggingface_hub/__pycache__/_tensorboard_logger.cpython-313.pyc,,
|
20 |
+
huggingface_hub/__pycache__/_upload_large_folder.cpython-313.pyc,,
|
21 |
+
huggingface_hub/__pycache__/_webhooks_payload.cpython-313.pyc,,
|
22 |
+
huggingface_hub/__pycache__/_webhooks_server.cpython-313.pyc,,
|
23 |
+
huggingface_hub/__pycache__/community.cpython-313.pyc,,
|
24 |
+
huggingface_hub/__pycache__/constants.cpython-313.pyc,,
|
25 |
+
huggingface_hub/__pycache__/errors.cpython-313.pyc,,
|
26 |
+
huggingface_hub/__pycache__/fastai_utils.cpython-313.pyc,,
|
27 |
+
huggingface_hub/__pycache__/file_download.cpython-313.pyc,,
|
28 |
+
huggingface_hub/__pycache__/hf_api.cpython-313.pyc,,
|
29 |
+
huggingface_hub/__pycache__/hf_file_system.cpython-313.pyc,,
|
30 |
+
huggingface_hub/__pycache__/hub_mixin.cpython-313.pyc,,
|
31 |
+
huggingface_hub/__pycache__/inference_api.cpython-313.pyc,,
|
32 |
+
huggingface_hub/__pycache__/keras_mixin.cpython-313.pyc,,
|
33 |
+
huggingface_hub/__pycache__/lfs.cpython-313.pyc,,
|
34 |
+
huggingface_hub/__pycache__/repocard.cpython-313.pyc,,
|
35 |
+
huggingface_hub/__pycache__/repocard_data.cpython-313.pyc,,
|
36 |
+
huggingface_hub/__pycache__/repository.cpython-313.pyc,,
|
37 |
+
huggingface_hub/_commit_api.py,sha256=Y9eTaW4bYzxtrZsSniVtfeAuFafqx8x1ofMI5es8hvM,31057
|
38 |
+
huggingface_hub/_commit_scheduler.py,sha256=tfIoO1xWHjTJ6qy6VS6HIoymDycFPg0d6pBSZprrU2U,14679
|
39 |
+
huggingface_hub/_inference_endpoints.py,sha256=wzjD8P68VpUDHzIDbXzFXsM2Y-aNVSAap7BXsZFuthk,16750
|
40 |
+
huggingface_hub/_local_folder.py,sha256=p9KQJkGD4ZrQ-2gbprB_NsOWGZJ_T5mYGh_BB8gqvx8,16259
|
41 |
+
huggingface_hub/_login.py,sha256=ssf4viT5BhHI2ZidnSuAZcrwSxzaLOrf8xgRVKuvu_A,20298
|
42 |
+
huggingface_hub/_snapshot_download.py,sha256=zZDaPBb4CfMCU7DgxjbaFmdoISCY425RaH7wXwFijEM,14992
|
43 |
+
huggingface_hub/_space_api.py,sha256=QVOUNty2T4RxPoxf9FzUjXmjHiGXP0mqXJzqQ7GmoJo,5363
|
44 |
+
huggingface_hub/_tensorboard_logger.py,sha256=ZkYcAUiRC8RGL214QUYtp58O8G5tn-HF6DCWha9imcA,8358
|
45 |
+
huggingface_hub/_upload_large_folder.py,sha256=g9P2-pQkbOd9UF_unXd9SpCX8p_U0gLyjqXZpkUl4p0,23484
|
46 |
+
huggingface_hub/_webhooks_payload.py,sha256=Xm3KaK7tCOGBlXkuZvbym6zjHXrT1XCrbUFWuXiBmNY,3617
|
47 |
+
huggingface_hub/_webhooks_server.py,sha256=oCvpFrYjrhJjClAMw26SQfvN4DUItgK2IhFp1OVh2bU,15623
|
48 |
+
huggingface_hub/commands/__init__.py,sha256=AkbM2a-iGh0Vq_xAWhK3mu3uZ44km8-X5uWjKcvcrUQ,928
|
49 |
+
huggingface_hub/commands/__pycache__/__init__.cpython-313.pyc,,
|
50 |
+
huggingface_hub/commands/__pycache__/_cli_utils.cpython-313.pyc,,
|
51 |
+
huggingface_hub/commands/__pycache__/delete_cache.cpython-313.pyc,,
|
52 |
+
huggingface_hub/commands/__pycache__/download.cpython-313.pyc,,
|
53 |
+
huggingface_hub/commands/__pycache__/env.cpython-313.pyc,,
|
54 |
+
huggingface_hub/commands/__pycache__/huggingface_cli.cpython-313.pyc,,
|
55 |
+
huggingface_hub/commands/__pycache__/lfs.cpython-313.pyc,,
|
56 |
+
huggingface_hub/commands/__pycache__/repo_files.cpython-313.pyc,,
|
57 |
+
huggingface_hub/commands/__pycache__/scan_cache.cpython-313.pyc,,
|
58 |
+
huggingface_hub/commands/__pycache__/tag.cpython-313.pyc,,
|
59 |
+
huggingface_hub/commands/__pycache__/upload.cpython-313.pyc,,
|
60 |
+
huggingface_hub/commands/__pycache__/upload_large_folder.cpython-313.pyc,,
|
61 |
+
huggingface_hub/commands/__pycache__/user.cpython-313.pyc,,
|
62 |
+
huggingface_hub/commands/__pycache__/version.cpython-313.pyc,,
|
63 |
+
huggingface_hub/commands/_cli_utils.py,sha256=Nt6CjbkYqQQRuh70bUXVA6rZpbZt_Sa1WqBUxjQLu6g,2095
|
64 |
+
huggingface_hub/commands/delete_cache.py,sha256=Rb1BtIltJPnQ-th7tcK_L4mFqfk785t3KXV77xXKBP4,16131
|
65 |
+
huggingface_hub/commands/download.py,sha256=1YXKttB8YBX7SJ0Jxg0t1n8yp2BUZXtY0ck6DhCg-XE,8183
|
66 |
+
huggingface_hub/commands/env.py,sha256=yYl4DSS14V8t244nAi0t77Izx5LIdgS_dy6xiV5VQME,1226
|
67 |
+
huggingface_hub/commands/huggingface_cli.py,sha256=ZwW_nwgppyj-GA6iM3mgmbXMZ63bgtpGl_yIQDyWS4A,2414
|
68 |
+
huggingface_hub/commands/lfs.py,sha256=xdbnNRO04UuQemEhUGT809jFgQn9Rj-SnyT_0Ph-VYg,7342
|
69 |
+
huggingface_hub/commands/repo_files.py,sha256=Nfv8TjuaZVOrj7TZjrojtjdD8Wf54aZvYPDEOevh7tA,4923
|
70 |
+
huggingface_hub/commands/scan_cache.py,sha256=CSXyIDCzOcxSFwIc_SqUC6J_TIc7uHXjrtCjkmE7k2g,8560
|
71 |
+
huggingface_hub/commands/tag.py,sha256=0LNQZyK-WKi0VIL9i1xWzKxJ1ILw1jxMF_E6t2weJss,6288
|
72 |
+
huggingface_hub/commands/upload.py,sha256=xMExm68YcR8R_dDRi3bcIC1qVCvRFRW7aP_AGxGZ1rc,13656
|
73 |
+
huggingface_hub/commands/upload_large_folder.py,sha256=LKvzEmKeRatBLlnJF__b5jFBQ8bcM2KQgN_Pcf1X5hE,6128
|
74 |
+
huggingface_hub/commands/user.py,sha256=M6Ef045YcyV4mFCbLaTRPciQDC6xtV9MMheeen69D0E,11168
|
75 |
+
huggingface_hub/commands/version.py,sha256=vfCJn7GO1m-DtDmbdsty8_RTVtnZ7lX6MJsx0Bf4e-s,1266
|
76 |
+
huggingface_hub/community.py,sha256=4MtcoxEI9_0lmmilBEnvUEi8_O1Ivfa8p6eKxYU5-ts,12198
|
77 |
+
huggingface_hub/constants.py,sha256=WshhUKAs6kyoBPN1ek9Zsr2JY-bvjof69VJOfuu8tm4,8090
|
78 |
+
huggingface_hub/errors.py,sha256=zble0j94ai8zwyM0a2DovwcF372zQohwDsgajTsaxqI,9703
|
79 |
+
huggingface_hub/fastai_utils.py,sha256=DpeH9d-6ut2k_nCAAwglM51XmRmgfbRe2SPifpVL5Yk,16745
|
80 |
+
huggingface_hub/file_download.py,sha256=NaHWoBHK4seUdhnx2zbtaTBK0TvTMsFqh2_gPKVTc2g,70380
|
81 |
+
huggingface_hub/hf_api.py,sha256=ORNYYlvHLiLRLcw2snHp6iIjv_jk15IPRL_ikw4Cc8M,422655
|
82 |
+
huggingface_hub/hf_file_system.py,sha256=m_g7uYLGxTdsBnhvR5835jvYMAuEBsUSFvEbzZKzzoo,47500
|
83 |
+
huggingface_hub/hub_mixin.py,sha256=_NOs2f7fQUAPIWltYDGGW81tkrguuHymMdIkw8Ct3Pw,37223
|
84 |
+
huggingface_hub/inference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
85 |
+
huggingface_hub/inference/__pycache__/__init__.cpython-313.pyc,,
|
86 |
+
huggingface_hub/inference/__pycache__/_client.cpython-313.pyc,,
|
87 |
+
huggingface_hub/inference/__pycache__/_common.cpython-313.pyc,,
|
88 |
+
huggingface_hub/inference/_client.py,sha256=DXQiejjYkZnRtefIaCjkEZxzlcyjMhwAaE3xAn_dcTw,149096
|
89 |
+
huggingface_hub/inference/_common.py,sha256=sIgFTKBcoIwddzwGsQa0a0eAQ8SgXP6eenD5lzBub8E,16819
|
90 |
+
huggingface_hub/inference/_generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
91 |
+
huggingface_hub/inference/_generated/__pycache__/__init__.cpython-313.pyc,,
|
92 |
+
huggingface_hub/inference/_generated/__pycache__/_async_client.cpython-313.pyc,,
|
93 |
+
huggingface_hub/inference/_generated/_async_client.py,sha256=sV3fcN2HQUquwU8N9Ag9yyzJGFeAFHBC8mU-ttN-Vk8,155645
|
94 |
+
huggingface_hub/inference/_generated/types/__init__.py,sha256=IOp7v252gLZbFb7MUCjDSvzwchycZqUo2gZPrpSDtV8,6211
|
95 |
+
huggingface_hub/inference/_generated/types/__pycache__/__init__.cpython-313.pyc,,
|
96 |
+
huggingface_hub/inference/_generated/types/__pycache__/audio_classification.cpython-313.pyc,,
|
97 |
+
huggingface_hub/inference/_generated/types/__pycache__/audio_to_audio.cpython-313.pyc,,
|
98 |
+
huggingface_hub/inference/_generated/types/__pycache__/automatic_speech_recognition.cpython-313.pyc,,
|
99 |
+
huggingface_hub/inference/_generated/types/__pycache__/base.cpython-313.pyc,,
|
100 |
+
huggingface_hub/inference/_generated/types/__pycache__/chat_completion.cpython-313.pyc,,
|
101 |
+
huggingface_hub/inference/_generated/types/__pycache__/depth_estimation.cpython-313.pyc,,
|
102 |
+
huggingface_hub/inference/_generated/types/__pycache__/document_question_answering.cpython-313.pyc,,
|
103 |
+
huggingface_hub/inference/_generated/types/__pycache__/feature_extraction.cpython-313.pyc,,
|
104 |
+
huggingface_hub/inference/_generated/types/__pycache__/fill_mask.cpython-313.pyc,,
|
105 |
+
huggingface_hub/inference/_generated/types/__pycache__/image_classification.cpython-313.pyc,,
|
106 |
+
huggingface_hub/inference/_generated/types/__pycache__/image_segmentation.cpython-313.pyc,,
|
107 |
+
huggingface_hub/inference/_generated/types/__pycache__/image_to_image.cpython-313.pyc,,
|
108 |
+
huggingface_hub/inference/_generated/types/__pycache__/image_to_text.cpython-313.pyc,,
|
109 |
+
huggingface_hub/inference/_generated/types/__pycache__/object_detection.cpython-313.pyc,,
|
110 |
+
huggingface_hub/inference/_generated/types/__pycache__/question_answering.cpython-313.pyc,,
|
111 |
+
huggingface_hub/inference/_generated/types/__pycache__/sentence_similarity.cpython-313.pyc,,
|
112 |
+
huggingface_hub/inference/_generated/types/__pycache__/summarization.cpython-313.pyc,,
|
113 |
+
huggingface_hub/inference/_generated/types/__pycache__/table_question_answering.cpython-313.pyc,,
|
114 |
+
huggingface_hub/inference/_generated/types/__pycache__/text2text_generation.cpython-313.pyc,,
|
115 |
+
huggingface_hub/inference/_generated/types/__pycache__/text_classification.cpython-313.pyc,,
|
116 |
+
huggingface_hub/inference/_generated/types/__pycache__/text_generation.cpython-313.pyc,,
|
117 |
+
huggingface_hub/inference/_generated/types/__pycache__/text_to_audio.cpython-313.pyc,,
|
118 |
+
huggingface_hub/inference/_generated/types/__pycache__/text_to_image.cpython-313.pyc,,
|
119 |
+
huggingface_hub/inference/_generated/types/__pycache__/text_to_speech.cpython-313.pyc,,
|
120 |
+
huggingface_hub/inference/_generated/types/__pycache__/token_classification.cpython-313.pyc,,
|
121 |
+
huggingface_hub/inference/_generated/types/__pycache__/translation.cpython-313.pyc,,
|
122 |
+
huggingface_hub/inference/_generated/types/__pycache__/video_classification.cpython-313.pyc,,
|
123 |
+
huggingface_hub/inference/_generated/types/__pycache__/visual_question_answering.cpython-313.pyc,,
|
124 |
+
huggingface_hub/inference/_generated/types/__pycache__/zero_shot_classification.cpython-313.pyc,,
|
125 |
+
huggingface_hub/inference/_generated/types/__pycache__/zero_shot_image_classification.cpython-313.pyc,,
|
126 |
+
huggingface_hub/inference/_generated/types/__pycache__/zero_shot_object_detection.cpython-313.pyc,,
|
127 |
+
huggingface_hub/inference/_generated/types/audio_classification.py,sha256=U340ccLtMjqRMtD3hviXCQaWk_DWJgQWor8vLgW_GKM,1552
|
128 |
+
huggingface_hub/inference/_generated/types/audio_to_audio.py,sha256=n7GeCepzt254yoSLsdjrI1j4fzYgjWzxoaKE5gZJc48,881
|
129 |
+
huggingface_hub/inference/_generated/types/automatic_speech_recognition.py,sha256=g5L2LasOLPW49ik69SlUu-EVdNn7IWWpmGhaP9b-en0,5582
|
130 |
+
huggingface_hub/inference/_generated/types/base.py,sha256=dQ-ej4weVueTJQXaDFLYFhKvlCUYVH2k9r-Ck0uhFIU,5870
|
131 |
+
huggingface_hub/inference/_generated/types/chat_completion.py,sha256=6QKg0upzaNTv5esWfRZbtYixNrcsvGNrUpPw1mNezQ8,9589
|
132 |
+
huggingface_hub/inference/_generated/types/depth_estimation.py,sha256=3ZUI2w3J-WgDDxlRQ_444r46TlFRhHBt2LN2LLVJB-w,919
|
133 |
+
huggingface_hub/inference/_generated/types/document_question_answering.py,sha256=NtEF_MRPbj4iaNIFSBC5sPf92HKeOc3B7ZZE1RooLFU,3170
|
134 |
+
huggingface_hub/inference/_generated/types/feature_extraction.py,sha256=-lCiN_i4ChGz4Dtw75muArKuvVfpquy-XRfC7OS7r0Q,1490
|
135 |
+
huggingface_hub/inference/_generated/types/fill_mask.py,sha256=fxYjJgERCwKS-LUc3FPfKEqo0x-BemwmC_MbgUZwdkQ,1687
|
136 |
+
huggingface_hub/inference/_generated/types/image_classification.py,sha256=hCvz1MpMVXY3KlVENmq1Ygn4SsbE9UngPH9Y7Nj_kgg,1564
|
137 |
+
huggingface_hub/inference/_generated/types/image_segmentation.py,sha256=4T0rj-wGJnmqvbcj7kVt_L_mhWjEpjFH9o0VhHXaGRs,1929
|
138 |
+
huggingface_hub/inference/_generated/types/image_to_image.py,sha256=ZJnOADT1UE0kuBJxiA4ITTjQmp0PI4p3DcNoWRzSwkE,1945
|
139 |
+
huggingface_hub/inference/_generated/types/image_to_text.py,sha256=kocfj5jn_sPDk5FSR_PGl4Xq72BNHOwHEKCP3wv2poY,4887
|
140 |
+
huggingface_hub/inference/_generated/types/object_detection.py,sha256=eS3bbkpdtjLu2k2TjlEvLjVtMI5OdHJK1jq3kTzVZdQ,1968
|
141 |
+
huggingface_hub/inference/_generated/types/question_answering.py,sha256=Dzpwz3KWq3uQuzIwMRbUxSHscpLsVOPjwrKVqLJR5vA,2866
|
142 |
+
huggingface_hub/inference/_generated/types/sentence_similarity.py,sha256=y3KHkCYxn1tOEPrOQ6DNqQtCEhNNtGEN5aMstiQ6hVA,1042
|
143 |
+
huggingface_hub/inference/_generated/types/summarization.py,sha256=uwW7QdEXhWSkDIqlz_EEZiaaQscgjzTocR1bu7LRaxU,1466
|
144 |
+
huggingface_hub/inference/_generated/types/table_question_answering.py,sha256=YLMT7FY_nx6CSv146p6MyFMzwSZLsYSmOQVG39aiKUQ,2261
|
145 |
+
huggingface_hub/inference/_generated/types/text2text_generation.py,sha256=dqwTt-i-k6jo9DdGUS5ZRtrkLTVWoje3L5C3-pMv45Q,1588
|
146 |
+
huggingface_hub/inference/_generated/types/text_classification.py,sha256=j-0g3Ajaxg4e5TQlNs3UqVryvp5delYm8znrgzvkW-8,1424
|
147 |
+
huggingface_hub/inference/_generated/types/text_generation.py,sha256=TKJb26T03Gyw3As0IXmRzAritNeKUpturKfO_2OenzA,5798
|
148 |
+
huggingface_hub/inference/_generated/types/text_to_audio.py,sha256=y5pT8-hWr1CK1Zxi0LPcfNbk2Mr0_DdfpRKRP1jSNts,4879
|
149 |
+
huggingface_hub/inference/_generated/types/text_to_image.py,sha256=DsCku9u8MaXdkkOvPqjYwHZgeCfI4HbGB8vRsPc7uUE,1984
|
150 |
+
huggingface_hub/inference/_generated/types/text_to_speech.py,sha256=Kc42sG1ybdhHn7Jo-t69FqVCxQ7aBhPnjh_4HyuEjW0,4937
|
151 |
+
huggingface_hub/inference/_generated/types/token_classification.py,sha256=C0K5OukrNTyvZE-AdIbblacTtyc1Ita374bwfNw2Y-8,1894
|
152 |
+
huggingface_hub/inference/_generated/types/translation.py,sha256=AYfr0n-o0HsK21QNs0HMufI1wCMbtRhAZvNK40hjAYk,1742
|
153 |
+
huggingface_hub/inference/_generated/types/video_classification.py,sha256=e6J5BNqQ5gtyHAuVB-UPo4RYy4AIT6XSShhGA13CB_E,1659
|
154 |
+
huggingface_hub/inference/_generated/types/visual_question_answering.py,sha256=OgjxhaZ42Q3pX5S7e1CGHVNKg9-wZpdhuylkylds57Y,1641
|
155 |
+
huggingface_hub/inference/_generated/types/zero_shot_classification.py,sha256=XysOHfPIx6V3jBuUM1P0QM4UjpkGQV816awIN_W5Oqk,1717
|
156 |
+
huggingface_hub/inference/_generated/types/zero_shot_image_classification.py,sha256=sXIYvmrX0DreSfjERDKeY9iTFV8Zg_UEo6m5xsOsInE,1466
|
157 |
+
huggingface_hub/inference/_generated/types/zero_shot_object_detection.py,sha256=LFtdjPxuEMsEcCqqrJETV05t9bz4CpdYMfU2a-eBvl4,1598
|
158 |
+
huggingface_hub/inference_api.py,sha256=b4-NhPSn9b44nYKV8tDKXodmE4JVdEymMWL4CVGkzlE,8323
|
159 |
+
huggingface_hub/keras_mixin.py,sha256=3d2oW35SALXHq-WHoLD_tbq0UrcabGKj3HidtPRx51U,19574
|
160 |
+
huggingface_hub/lfs.py,sha256=n-TIjK7J7aXG3zi__0nkd6aNkE4djOf9CD6dYQOQ5P8,16649
|
161 |
+
huggingface_hub/repocard.py,sha256=86FiLCufy9oLf6moGkfmi9h3euGwDDY4eLrs0GKWBRk,34732
|
162 |
+
huggingface_hub/repocard_data.py,sha256=rgpr3xbwMNvmoazkz0AgRaWINPdyy5kd70eVn0j4AzQ,33204
|
163 |
+
huggingface_hub/repository.py,sha256=xVQR-MRKNDfJ_Z_99DwtXZB3xNO06eYG_GvRM4fLiTU,54557
|
164 |
+
huggingface_hub/serialization/__init__.py,sha256=kn-Fa-m4FzMnN8lNsF-SwFcfzug4CucexybGKyvZ8S0,1041
|
165 |
+
huggingface_hub/serialization/__pycache__/__init__.cpython-313.pyc,,
|
166 |
+
huggingface_hub/serialization/__pycache__/_base.cpython-313.pyc,,
|
167 |
+
huggingface_hub/serialization/__pycache__/_dduf.cpython-313.pyc,,
|
168 |
+
huggingface_hub/serialization/__pycache__/_tensorflow.cpython-313.pyc,,
|
169 |
+
huggingface_hub/serialization/__pycache__/_torch.cpython-313.pyc,,
|
170 |
+
huggingface_hub/serialization/_base.py,sha256=JZneES-HgcRH9C2SQehIGRDtT7nS7emu-RRV4ZjB6xo,8124
|
171 |
+
huggingface_hub/serialization/_dduf.py,sha256=s42239rLiHwaJE36QDEmS5GH7DSmQ__BffiHJO5RjIg,15424
|
172 |
+
huggingface_hub/serialization/_tensorflow.py,sha256=zHOvEMg-JHC55Fm4roDT3LUCDO5zB9qtXZffG065RAM,3625
|
173 |
+
huggingface_hub/serialization/_torch.py,sha256=fX3KHMkW3h9eOm5wGvmNeAq3GU5lxq9db5c57kSX77k,44717
|
174 |
+
huggingface_hub/templates/datasetcard_template.md,sha256=W-EMqR6wndbrnZorkVv56URWPG49l7MATGeI015kTvs,5503
|
175 |
+
huggingface_hub/templates/modelcard_template.md,sha256=4AqArS3cqdtbit5Bo-DhjcnDFR-pza5hErLLTPM4Yuc,6870
|
176 |
+
huggingface_hub/utils/__init__.py,sha256=aMEsiXGi93z-dXz1W7FFma71tAMeKw0SoKVZSQUeE_4,3525
|
177 |
+
huggingface_hub/utils/__pycache__/__init__.cpython-313.pyc,,
|
178 |
+
huggingface_hub/utils/__pycache__/_auth.cpython-313.pyc,,
|
179 |
+
huggingface_hub/utils/__pycache__/_cache_assets.cpython-313.pyc,,
|
180 |
+
huggingface_hub/utils/__pycache__/_cache_manager.cpython-313.pyc,,
|
181 |
+
huggingface_hub/utils/__pycache__/_chunk_utils.cpython-313.pyc,,
|
182 |
+
huggingface_hub/utils/__pycache__/_datetime.cpython-313.pyc,,
|
183 |
+
huggingface_hub/utils/__pycache__/_deprecation.cpython-313.pyc,,
|
184 |
+
huggingface_hub/utils/__pycache__/_experimental.cpython-313.pyc,,
|
185 |
+
huggingface_hub/utils/__pycache__/_fixes.cpython-313.pyc,,
|
186 |
+
huggingface_hub/utils/__pycache__/_git_credential.cpython-313.pyc,,
|
187 |
+
huggingface_hub/utils/__pycache__/_headers.cpython-313.pyc,,
|
188 |
+
huggingface_hub/utils/__pycache__/_hf_folder.cpython-313.pyc,,
|
189 |
+
huggingface_hub/utils/__pycache__/_http.cpython-313.pyc,,
|
190 |
+
huggingface_hub/utils/__pycache__/_lfs.cpython-313.pyc,,
|
191 |
+
huggingface_hub/utils/__pycache__/_pagination.cpython-313.pyc,,
|
192 |
+
huggingface_hub/utils/__pycache__/_paths.cpython-313.pyc,,
|
193 |
+
huggingface_hub/utils/__pycache__/_runtime.cpython-313.pyc,,
|
194 |
+
huggingface_hub/utils/__pycache__/_safetensors.cpython-313.pyc,,
|
195 |
+
huggingface_hub/utils/__pycache__/_subprocess.cpython-313.pyc,,
|
196 |
+
huggingface_hub/utils/__pycache__/_telemetry.cpython-313.pyc,,
|
197 |
+
huggingface_hub/utils/__pycache__/_typing.cpython-313.pyc,,
|
198 |
+
huggingface_hub/utils/__pycache__/_validators.cpython-313.pyc,,
|
199 |
+
huggingface_hub/utils/__pycache__/endpoint_helpers.cpython-313.pyc,,
|
200 |
+
huggingface_hub/utils/__pycache__/insecure_hashlib.cpython-313.pyc,,
|
201 |
+
huggingface_hub/utils/__pycache__/logging.cpython-313.pyc,,
|
202 |
+
huggingface_hub/utils/__pycache__/sha.cpython-313.pyc,,
|
203 |
+
huggingface_hub/utils/__pycache__/tqdm.cpython-313.pyc,,
|
204 |
+
huggingface_hub/utils/_auth.py,sha256=-9p3SSOtWKMMCDKlsM_-ebsIGX0sSgKTSnC-_O4kTxg,8294
|
205 |
+
huggingface_hub/utils/_cache_assets.py,sha256=kai77HPQMfYpROouMBQCr_gdBCaeTm996Sqj0dExbNg,5728
|
206 |
+
huggingface_hub/utils/_cache_manager.py,sha256=GhiuVQsEkWU55uYkkgiGJV1_naeciyk8u4qb4WTIVyw,34531
|
207 |
+
huggingface_hub/utils/_chunk_utils.py,sha256=kRCaj5228_vKcyLWspd8Xq01f17Jz6ds5Sr9ed5d_RU,2130
|
208 |
+
huggingface_hub/utils/_datetime.py,sha256=kCS5jaKV25kOncX1xujbXsz5iDLcjLcLw85semGNzxQ,2770
|
209 |
+
huggingface_hub/utils/_deprecation.py,sha256=HZhRGGUX_QMKBBBwHHlffLtmCSK01TOpeXHefZbPfwI,4872
|
210 |
+
huggingface_hub/utils/_experimental.py,sha256=crCPH6k6-11wwH2GZuZzZzZbjUotay49ywV1SSJhMHM,2395
|
211 |
+
huggingface_hub/utils/_fixes.py,sha256=C-feEQOZtXGHP4E2Qlg2VyxWSZhdCVmd0_B6zgiPcWA,4045
|
212 |
+
huggingface_hub/utils/_git_credential.py,sha256=SDdsiREr1TcAR2Ze2TB0E5cYzVJgvDZrs60od9lAsMc,4596
|
213 |
+
huggingface_hub/utils/_headers.py,sha256=A-OYkrVG8LQGgeNq79rIbPD0ovHBibnoASoxJESzeuY,8698
|
214 |
+
huggingface_hub/utils/_hf_folder.py,sha256=cffdBqvvk0E_DW9rkaaVXLYOTFsOREDB_Y98evp_nWI,3612
|
215 |
+
huggingface_hub/utils/_http.py,sha256=-dfsV0Bs9pb8EStpcJvDGOfp_sKYD-re-XbIVITVAyo,22670
|
216 |
+
huggingface_hub/utils/_lfs.py,sha256=EC0Oz6Wiwl8foRNkUOzrETXzAWlbgpnpxo5a410ovFY,3957
|
217 |
+
huggingface_hub/utils/_pagination.py,sha256=hzLFLd8i_DKkPRVYzOx2CxLt5lcocEiAxDJriQUjAjY,1841
|
218 |
+
huggingface_hub/utils/_paths.py,sha256=w1ZhFmmD5ykWjp_hAvhjtOoa2ZUcOXJrF4a6O3QpAWo,5042
|
219 |
+
huggingface_hub/utils/_runtime.py,sha256=tUyWylDgqaOXnMg39rvyusiruVN5ulcqiSwUEkQ9jjg,11195
|
220 |
+
huggingface_hub/utils/_safetensors.py,sha256=GW3nyv7xQcuwObKYeYoT9VhURVzG1DZTbKBKho8Bbos,4458
|
221 |
+
huggingface_hub/utils/_subprocess.py,sha256=6GpGD4qE9-Z1-Ocs3JuCLjR4NcRlknA-hAuQlqiprYY,4595
|
222 |
+
huggingface_hub/utils/_telemetry.py,sha256=54LXeIJU5pEGghPAh06gqNAR-UoxOjVLvKqAQscwqZs,4890
|
223 |
+
huggingface_hub/utils/_typing.py,sha256=UO0-GeTbiKFV9GqDh4YNRyScQSRAAZRoUeEYQX4P0rE,2882
|
224 |
+
huggingface_hub/utils/_validators.py,sha256=dDsVG31iooTYrIyi5Vwr1DukL0fEmJwu3ceVNduhsuE,9204
|
225 |
+
huggingface_hub/utils/endpoint_helpers.py,sha256=9VtIAlxQ5H_4y30sjCAgbu7XCqAtNLC7aRYxaNn0hLI,2366
|
226 |
+
huggingface_hub/utils/insecure_hashlib.py,sha256=OjxlvtSQHpbLp9PWSrXBDJ0wHjxCBU-SQJgucEEXDbU,1058
|
227 |
+
huggingface_hub/utils/logging.py,sha256=Cp03s0uEl3kDM9XHQW9a8GAoExODQ-e7kEtgMt-_To8,4728
|
228 |
+
huggingface_hub/utils/sha.py,sha256=OFnNGCba0sNcT2gUwaVCJnldxlltrHHe0DS_PCpV3C4,2134
|
229 |
+
huggingface_hub/utils/tqdm.py,sha256=ZgdphuTnwAIaUKnnD2P7qVvNHpzHAyrYoItkiV0aEjQ,9835
|
meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/REQUESTED
ADDED
File without changes
|
meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/WHEEL
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Wheel-Version: 1.0
|
2 |
+
Generator: bdist_wheel (0.45.1)
|
3 |
+
Root-Is-Purelib: true
|
4 |
+
Tag: py3-none-any
|
5 |
+
|
meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/entry_points.txt
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[console_scripts]
|
2 |
+
huggingface-cli = huggingface_hub.commands.huggingface_cli:main
|
3 |
+
|
4 |
+
[fsspec.specs]
|
5 |
+
hf=huggingface_hub.HfFileSystem
|
6 |
+
|
meow/lib/python3.13/site-packages/huggingface_hub-0.27.0.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
huggingface_hub
|
meow/lib/python3.13/site-packages/idna/codec.py
ADDED
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import codecs
|
2 |
+
import re
|
3 |
+
from typing import Any, Optional, Tuple
|
4 |
+
|
5 |
+
from .core import IDNAError, alabel, decode, encode, ulabel
|
6 |
+
|
7 |
+
_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
|
8 |
+
|
9 |
+
|
10 |
+
class Codec(codecs.Codec):
    """Stateless codec mapping Unicode domain names to/from their IDNA 2008
    ASCII (A-label) byte form via :func:`encode` / :func:`decode` from ``.core``."""

    def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]:
        """Encode *data* to IDNA bytes.

        Only ``"strict"`` error handling is supported; anything else raises
        :class:`IDNAError`. Returns ``(encoded_bytes, chars_consumed)``.
        """
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))

        # Empty input short-circuits without touching the core encoder.
        if data:
            return encode(data), len(data)
        return b"", 0

    def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]:
        """Decode IDNA bytes *data* back to a Unicode domain name.

        Only ``"strict"`` error handling is supported; anything else raises
        :class:`IDNAError`. Returns ``(decoded_str, bytes_consumed)``.
        """
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))

        if data:
            return decode(data), len(data)
        return "", 0
|
28 |
+
|
29 |
+
|
30 |
+
class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
    """Incremental IDNA encoder.

    Splits the buffered input on any Unicode dot separator, converts each
    complete label with ``alabel``, and holds back a possibly unfinished
    trailing label until the next call (or until ``final`` is true).
    """

    def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))

        if not data:
            return b"", 0

        labels = _unicode_dots_re.split(data)
        trailing_dot = b""
        if labels:
            if not labels[-1]:
                # Input ended on a separator: drop the empty label, emit the dot.
                trailing_dot = b"."
                del labels[-1]
            elif not final:
                # The last label may still grow — keep it buffered for later.
                del labels[-1]
                if labels:
                    trailing_dot = b"."

        encoded = []
        consumed = 0  # input characters accounted for in this call
        for label in labels:
            encoded.append(alabel(label))
            if consumed:
                consumed += 1  # the separator character between labels
            consumed += len(label)

        # Join A-labels with U+002E and re-attach any trailing separator.
        out = b".".join(encoded) + trailing_dot
        consumed += len(trailing_dot)
        return out, consumed
|
62 |
+
|
63 |
+
|
64 |
+
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    """Incremental IDNA decoder.

    Splits the buffered input on any Unicode dot separator, converts each
    complete A-label back to Unicode with ``ulabel``, and holds back a
    possibly unfinished trailing label until the next call.
    """

    def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]:
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))

        if not data:
            return ("", 0)

        if not isinstance(data, str):
            # Buffered input arrives as bytes; A-labels are pure ASCII.
            data = str(data, "ascii")

        labels = _unicode_dots_re.split(data)
        trailing_dot = ""
        if labels:
            if not labels[-1]:
                # Input ended on a separator: drop the empty label, emit the dot.
                trailing_dot = "."
                del labels[-1]
            elif not final:
                # The last label may still be incomplete — buffer it for later.
                del labels[-1]
                if labels:
                    trailing_dot = "."

        decoded = []
        consumed = 0  # input characters accounted for in this call
        for label in labels:
            decoded.append(ulabel(label))
            if consumed:
                consumed += 1  # the separator character between labels
            consumed += len(label)

        text = ".".join(decoded) + trailing_dot
        consumed += len(trailing_dot)
        return (text, consumed)
|
98 |
+
|
99 |
+
|
100 |
+
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer combining the IDNA :class:`Codec` with :class:`codecs.StreamWriter`."""

    pass
|
102 |
+
|
103 |
+
|
104 |
+
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader combining the IDNA :class:`Codec` with :class:`codecs.StreamReader`."""

    pass
|
106 |
+
|
107 |
+
|
108 |
+
def search_function(name: str) -> Optional[codecs.CodecInfo]:
    """Codec search hook: return the CodecInfo for ``"idna2008"``, else ``None``."""
    if name == "idna2008":
        codec = Codec()
        return codecs.CodecInfo(
            name=name,
            encode=codec.encode,
            decode=codec.decode,
            incrementalencoder=IncrementalEncoder,
            incrementaldecoder=IncrementalDecoder,
            streamwriter=StreamWriter,
            streamreader=StreamReader,
        )
    return None
|
120 |
+
|
121 |
+
|
122 |
+
codecs.register(search_function)
|
meow/lib/python3.13/site-packages/idna/uts46data.py
ADDED
The diff for this file is too large to render.
See raw diff
|
|
meow/lib/python3.13/site-packages/packaging-24.2.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
pip
|
meow/lib/python3.13/site-packages/packaging-24.2.dist-info/LICENSE
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
This software is made available under the terms of *either* of the licenses
|
2 |
+
found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
|
3 |
+
under the terms of *both* these licenses.
|
meow/lib/python3.13/site-packages/packaging-24.2.dist-info/LICENSE.APACHE
ADDED
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
Apache License
|
3 |
+
Version 2.0, January 2004
|
4 |
+
http://www.apache.org/licenses/
|
5 |
+
|
6 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
7 |
+
|
8 |
+
1. Definitions.
|
9 |
+
|
10 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
11 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
12 |
+
|
13 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
14 |
+
the copyright owner that is granting the License.
|
15 |
+
|
16 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
17 |
+
other entities that control, are controlled by, or are under common
|
18 |
+
control with that entity. For the purposes of this definition,
|
19 |
+
"control" means (i) the power, direct or indirect, to cause the
|
20 |
+
direction or management of such entity, whether by contract or
|
21 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
22 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
23 |
+
|
24 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
25 |
+
exercising permissions granted by this License.
|
26 |
+
|
27 |
+
"Source" form shall mean the preferred form for making modifications,
|
28 |
+
including but not limited to software source code, documentation
|
29 |
+
source, and configuration files.
|
30 |
+
|
31 |
+
"Object" form shall mean any form resulting from mechanical
|
32 |
+
transformation or translation of a Source form, including but
|
33 |
+
not limited to compiled object code, generated documentation,
|
34 |
+
and conversions to other media types.
|
35 |
+
|
36 |
+
"Work" shall mean the work of authorship, whether in Source or
|
37 |
+
Object form, made available under the License, as indicated by a
|
38 |
+
copyright notice that is included in or attached to the work
|
39 |
+
(an example is provided in the Appendix below).
|
40 |
+
|
41 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
42 |
+
form, that is based on (or derived from) the Work and for which the
|
43 |
+
editorial revisions, annotations, elaborations, or other modifications
|
44 |
+
represent, as a whole, an original work of authorship. For the purposes
|
45 |
+
of this License, Derivative Works shall not include works that remain
|
46 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
47 |
+
the Work and Derivative Works thereof.
|
48 |
+
|
49 |
+
"Contribution" shall mean any work of authorship, including
|
50 |
+
the original version of the Work and any modifications or additions
|
51 |
+
to that Work or Derivative Works thereof, that is intentionally
|
52 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
53 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
54 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
55 |
+
means any form of electronic, verbal, or written communication sent
|
56 |
+
to the Licensor or its representatives, including but not limited to
|
57 |
+
communication on electronic mailing lists, source code control systems,
|
58 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
59 |
+
Licensor for the purpose of discussing and improving the Work, but
|
60 |
+
excluding communication that is conspicuously marked or otherwise
|
61 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
62 |
+
|
63 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
64 |
+
on behalf of whom a Contribution has been received by Licensor and
|
65 |
+
subsequently incorporated within the Work.
|
66 |
+
|
67 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
68 |
+
this License, each Contributor hereby grants to You a perpetual,
|
69 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
70 |
+
copyright license to reproduce, prepare Derivative Works of,
|
71 |
+
publicly display, publicly perform, sublicense, and distribute the
|
72 |
+
Work and such Derivative Works in Source or Object form.
|
73 |
+
|
74 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
75 |
+
this License, each Contributor hereby grants to You a perpetual,
|
76 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
77 |
+
(except as stated in this section) patent license to make, have made,
|
78 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
79 |
+
where such license applies only to those patent claims licensable
|
80 |
+
by such Contributor that are necessarily infringed by their
|
81 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
82 |
+
with the Work to which such Contribution(s) was submitted. If You
|
83 |
+
institute patent litigation against any entity (including a
|
84 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
85 |
+
or a Contribution incorporated within the Work constitutes direct
|
86 |
+
or contributory patent infringement, then any patent licenses
|
87 |
+
granted to You under this License for that Work shall terminate
|
88 |
+
as of the date such litigation is filed.
|
89 |
+
|
90 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
91 |
+
Work or Derivative Works thereof in any medium, with or without
|
92 |
+
modifications, and in Source or Object form, provided that You
|
93 |
+
meet the following conditions:
|
94 |
+
|
95 |
+
(a) You must give any other recipients of the Work or
|
96 |
+
Derivative Works a copy of this License; and
|
97 |
+
|
98 |
+
(b) You must cause any modified files to carry prominent notices
|
99 |
+
stating that You changed the files; and
|
100 |
+
|
101 |
+
(c) You must retain, in the Source form of any Derivative Works
|
102 |
+
that You distribute, all copyright, patent, trademark, and
|
103 |
+
attribution notices from the Source form of the Work,
|
104 |
+
excluding those notices that do not pertain to any part of
|
105 |
+
the Derivative Works; and
|
106 |
+
|
107 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
108 |
+
distribution, then any Derivative Works that You distribute must
|
109 |
+
include a readable copy of the attribution notices contained
|
110 |
+
within such NOTICE file, excluding those notices that do not
|
111 |
+
pertain to any part of the Derivative Works, in at least one
|
112 |
+
of the following places: within a NOTICE text file distributed
|
113 |
+
as part of the Derivative Works; within the Source form or
|
114 |
+
documentation, if provided along with the Derivative Works; or,
|
115 |
+
within a display generated by the Derivative Works, if and
|
116 |
+
wherever such third-party notices normally appear. The contents
|
117 |
+
of the NOTICE file are for informational purposes only and
|
118 |
+
do not modify the License. You may add Your own attribution
|
119 |
+
notices within Derivative Works that You distribute, alongside
|
120 |
+
or as an addendum to the NOTICE text from the Work, provided
|
121 |
+
that such additional attribution notices cannot be construed
|
122 |
+
as modifying the License.
|
123 |
+
|
124 |
+
You may add Your own copyright statement to Your modifications and
|
125 |
+
may provide additional or different license terms and conditions
|
126 |
+
for use, reproduction, or distribution of Your modifications, or
|
127 |
+
for any such Derivative Works as a whole, provided Your use,
|
128 |
+
reproduction, and distribution of the Work otherwise complies with
|
129 |
+
the conditions stated in this License.
|
130 |
+
|
131 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
132 |
+
any Contribution intentionally submitted for inclusion in the Work
|
133 |
+
by You to the Licensor shall be under the terms and conditions of
|
134 |
+
this License, without any additional terms or conditions.
|
135 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
136 |
+
the terms of any separate license agreement you may have executed
|
137 |
+
with Licensor regarding such Contributions.
|
138 |
+
|
139 |
+
6. Trademarks. This License does not grant permission to use the trade
|
140 |
+
names, trademarks, service marks, or product names of the Licensor,
|
141 |
+
except as required for reasonable and customary use in describing the
|
142 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
143 |
+
|
144 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
145 |
+
agreed to in writing, Licensor provides the Work (and each
|
146 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
147 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
148 |
+
implied, including, without limitation, any warranties or conditions
|
149 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
150 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
151 |
+
appropriateness of using or redistributing the Work and assume any
|
152 |
+
risks associated with Your exercise of permissions under this License.
|
153 |
+
|
154 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
155 |
+
whether in tort (including negligence), contract, or otherwise,
|
156 |
+
unless required by applicable law (such as deliberate and grossly
|
157 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
158 |
+
liable to You for damages, including any direct, indirect, special,
|
159 |
+
incidental, or consequential damages of any character arising as a
|
160 |
+
result of this License or out of the use or inability to use the
|
161 |
+
Work (including but not limited to damages for loss of goodwill,
|
162 |
+
work stoppage, computer failure or malfunction, or any and all
|
163 |
+
other commercial damages or losses), even if such Contributor
|
164 |
+
has been advised of the possibility of such damages.
|
165 |
+
|
166 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
167 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
168 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
169 |
+
or other liability obligations and/or rights consistent with this
|
170 |
+
License. However, in accepting such obligations, You may act only
|
171 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
172 |
+
of any other Contributor, and only if You agree to indemnify,
|
173 |
+
defend, and hold each Contributor harmless for any liability
|
174 |
+
incurred by, or claims asserted against, such Contributor by reason
|
175 |
+
of your accepting any such warranty or additional liability.
|
176 |
+
|
177 |
+
END OF TERMS AND CONDITIONS
|
meow/lib/python3.13/site-packages/packaging-24.2.dist-info/LICENSE.BSD
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Copyright (c) Donald Stufft and individual contributors.
|
2 |
+
All rights reserved.
|
3 |
+
|
4 |
+
Redistribution and use in source and binary forms, with or without
|
5 |
+
modification, are permitted provided that the following conditions are met:
|
6 |
+
|
7 |
+
1. Redistributions of source code must retain the above copyright notice,
|
8 |
+
this list of conditions and the following disclaimer.
|
9 |
+
|
10 |
+
2. Redistributions in binary form must reproduce the above copyright
|
11 |
+
notice, this list of conditions and the following disclaimer in the
|
12 |
+
documentation and/or other materials provided with the distribution.
|
13 |
+
|
14 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
15 |
+
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
16 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
17 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
18 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
19 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
20 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
21 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
22 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
23 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
meow/lib/python3.13/site-packages/packaging-24.2.dist-info/METADATA
ADDED
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Metadata-Version: 2.3
|
2 |
+
Name: packaging
|
3 |
+
Version: 24.2
|
4 |
+
Summary: Core utilities for Python packages
|
5 |
+
Author-email: Donald Stufft <[email protected]>
|
6 |
+
Requires-Python: >=3.8
|
7 |
+
Description-Content-Type: text/x-rst
|
8 |
+
Classifier: Development Status :: 5 - Production/Stable
|
9 |
+
Classifier: Intended Audience :: Developers
|
10 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
11 |
+
Classifier: License :: OSI Approved :: BSD License
|
12 |
+
Classifier: Programming Language :: Python
|
13 |
+
Classifier: Programming Language :: Python :: 3
|
14 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
15 |
+
Classifier: Programming Language :: Python :: 3.8
|
16 |
+
Classifier: Programming Language :: Python :: 3.9
|
17 |
+
Classifier: Programming Language :: Python :: 3.10
|
18 |
+
Classifier: Programming Language :: Python :: 3.11
|
19 |
+
Classifier: Programming Language :: Python :: 3.12
|
20 |
+
Classifier: Programming Language :: Python :: 3.13
|
21 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
22 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
23 |
+
Classifier: Typing :: Typed
|
24 |
+
Project-URL: Documentation, https://packaging.pypa.io/
|
25 |
+
Project-URL: Source, https://github.com/pypa/packaging
|
26 |
+
|
27 |
+
packaging
|
28 |
+
=========
|
29 |
+
|
30 |
+
.. start-intro
|
31 |
+
|
32 |
+
Reusable core utilities for various Python Packaging
|
33 |
+
`interoperability specifications <https://packaging.python.org/specifications/>`_.
|
34 |
+
|
35 |
+
This library provides utilities that implement the interoperability
|
36 |
+
specifications which have clearly one correct behaviour (eg: :pep:`440`)
|
37 |
+
or benefit greatly from having a single shared implementation (eg: :pep:`425`).
|
38 |
+
|
39 |
+
.. end-intro
|
40 |
+
|
41 |
+
The ``packaging`` project includes the following: version handling, specifiers,
|
42 |
+
markers, requirements, tags, utilities.
|
43 |
+
|
44 |
+
Documentation
|
45 |
+
-------------
|
46 |
+
|
47 |
+
The `documentation`_ provides information and the API for the following:
|
48 |
+
|
49 |
+
- Version Handling
|
50 |
+
- Specifiers
|
51 |
+
- Markers
|
52 |
+
- Requirements
|
53 |
+
- Tags
|
54 |
+
- Utilities
|
55 |
+
|
56 |
+
Installation
|
57 |
+
------------
|
58 |
+
|
59 |
+
Use ``pip`` to install these utilities::
|
60 |
+
|
61 |
+
pip install packaging
|
62 |
+
|
63 |
+
The ``packaging`` library uses calendar-based versioning (``YY.N``).
|
64 |
+
|
65 |
+
Discussion
|
66 |
+
----------
|
67 |
+
|
68 |
+
If you run into bugs, you can file them in our `issue tracker`_.
|
69 |
+
|
70 |
+
You can also join ``#pypa`` on Freenode to ask questions or get involved.
|
71 |
+
|
72 |
+
|
73 |
+
.. _`documentation`: https://packaging.pypa.io/
|
74 |
+
.. _`issue tracker`: https://github.com/pypa/packaging/issues
|
75 |
+
|
76 |
+
|
77 |
+
Code of Conduct
|
78 |
+
---------------
|
79 |
+
|
80 |
+
Everyone interacting in the packaging project's codebases, issue trackers, chat
|
81 |
+
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
|
82 |
+
|
83 |
+
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
|
84 |
+
|
85 |
+
Contributing
|
86 |
+
------------
|
87 |
+
|
88 |
+
The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
|
89 |
+
well as how to report a potential security issue. The documentation for this
|
90 |
+
project also covers information about `project development`_ and `security`_.
|
91 |
+
|
92 |
+
.. _`project development`: https://packaging.pypa.io/en/latest/development/
|
93 |
+
.. _`security`: https://packaging.pypa.io/en/latest/security/
|
94 |
+
|
95 |
+
Project History
|
96 |
+
---------------
|
97 |
+
|
98 |
+
Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
|
99 |
+
recent changes and project history.
|
100 |
+
|
101 |
+
.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/
|
102 |
+
|
meow/lib/python3.13/site-packages/packaging-24.2.dist-info/RECORD
ADDED
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
packaging-24.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
2 |
+
packaging-24.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
|
3 |
+
packaging-24.2.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
|
4 |
+
packaging-24.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
|
5 |
+
packaging-24.2.dist-info/METADATA,sha256=ohH86s6k5mIfQxY2TS0LcSfADeOFa4BiCC-bxZV-pNs,3204
|
6 |
+
packaging-24.2.dist-info/RECORD,,
|
7 |
+
packaging-24.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
8 |
+
packaging/__init__.py,sha256=dk4Ta_vmdVJxYHDcfyhvQNw8V3PgSBomKNXqg-D2JDY,494
|
9 |
+
packaging/__pycache__/__init__.cpython-313.pyc,,
|
10 |
+
packaging/__pycache__/_elffile.cpython-313.pyc,,
|
11 |
+
packaging/__pycache__/_manylinux.cpython-313.pyc,,
|
12 |
+
packaging/__pycache__/_musllinux.cpython-313.pyc,,
|
13 |
+
packaging/__pycache__/_parser.cpython-313.pyc,,
|
14 |
+
packaging/__pycache__/_structures.cpython-313.pyc,,
|
15 |
+
packaging/__pycache__/_tokenizer.cpython-313.pyc,,
|
16 |
+
packaging/__pycache__/markers.cpython-313.pyc,,
|
17 |
+
packaging/__pycache__/metadata.cpython-313.pyc,,
|
18 |
+
packaging/__pycache__/requirements.cpython-313.pyc,,
|
19 |
+
packaging/__pycache__/specifiers.cpython-313.pyc,,
|
20 |
+
packaging/__pycache__/tags.cpython-313.pyc,,
|
21 |
+
packaging/__pycache__/utils.cpython-313.pyc,,
|
22 |
+
packaging/__pycache__/version.cpython-313.pyc,,
|
23 |
+
packaging/_elffile.py,sha256=cflAQAkE25tzhYmq_aCi72QfbT_tn891tPzfpbeHOwE,3306
|
24 |
+
packaging/_manylinux.py,sha256=vl5OCoz4kx80H5rwXKeXWjl9WNISGmr4ZgTpTP9lU9c,9612
|
25 |
+
packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
|
26 |
+
packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
|
27 |
+
packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
28 |
+
packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
|
29 |
+
packaging/licenses/__init__.py,sha256=1x5M1nEYjcgwEbLt0dXwz2ukjr18DiCzC0sraQqJ-Ww,5715
|
30 |
+
packaging/licenses/__pycache__/__init__.cpython-313.pyc,,
|
31 |
+
packaging/licenses/__pycache__/_spdx.cpython-313.pyc,,
|
32 |
+
packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398
|
33 |
+
packaging/markers.py,sha256=c89TNzB7ZdGYhkovm6PYmqGyHxXlYVaLW591PHUNKD8,10561
|
34 |
+
packaging/metadata.py,sha256=YJibM7GYe4re8-0a3OlXmGS-XDgTEoO4tlBt2q25Bng,34762
|
35 |
+
packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
36 |
+
packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
|
37 |
+
packaging/specifiers.py,sha256=GG1wPNMcL0fMJO68vF53wKMdwnfehDcaI-r9NpTfilA,40074
|
38 |
+
packaging/tags.py,sha256=CFqrJzAzc2XNGexerH__T-Y5Iwq7WbsYXsiLERLWxY0,21014
|
39 |
+
packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050
|
40 |
+
packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676
|
meow/lib/python3.13/site-packages/packaging-24.2.dist-info/WHEEL
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Wheel-Version: 1.0
|
2 |
+
Generator: flit 3.10.1
|
3 |
+
Root-Is-Purelib: true
|
4 |
+
Tag: py3-none-any
|
meow/lib/python3.13/site-packages/pip/__init__.py
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import List, Optional
|
2 |
+
|
3 |
+
__version__ = "24.3.1"
|
4 |
+
|
5 |
+
|
6 |
+
def main(args: Optional[List[str]] = None) -> int:
|
7 |
+
"""This is an internal API only meant for use by pip's own console scripts.
|
8 |
+
|
9 |
+
For additional details, see https://github.com/pypa/pip/issues/7498.
|
10 |
+
"""
|
11 |
+
from pip._internal.utils.entrypoints import _wrapper
|
12 |
+
|
13 |
+
return _wrapper(args)
|
meow/lib/python3.13/site-packages/pip/__main__.py
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
|
4 |
+
# Remove '' and current working directory from the first entry
|
5 |
+
# of sys.path, if present to avoid using current directory
|
6 |
+
# in pip commands check, freeze, install, list and show,
|
7 |
+
# when invoked as python -m pip <command>
|
8 |
+
if sys.path[0] in ("", os.getcwd()):
|
9 |
+
sys.path.pop(0)
|
10 |
+
|
11 |
+
# If we are running from a wheel, add the wheel to sys.path
|
12 |
+
# This allows the usage python pip-*.whl/pip install pip-*.whl
|
13 |
+
if __package__ == "":
|
14 |
+
# __file__ is pip-*.whl/pip/__main__.py
|
15 |
+
# first dirname call strips of '/__main__.py', second strips off '/pip'
|
16 |
+
# Resulting path is the name of the wheel itself
|
17 |
+
# Add that to sys.path so we can import pip
|
18 |
+
path = os.path.dirname(os.path.dirname(__file__))
|
19 |
+
sys.path.insert(0, path)
|
20 |
+
|
21 |
+
if __name__ == "__main__":
|
22 |
+
from pip._internal.cli.main import main as _main
|
23 |
+
|
24 |
+
sys.exit(_main())
|
meow/lib/python3.13/site-packages/pip/__pip-runner__.py
ADDED
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Execute exactly this copy of pip, within a different environment.
|
2 |
+
|
3 |
+
This file is named as it is, to ensure that this module can't be imported via
|
4 |
+
an import statement.
|
5 |
+
"""
|
6 |
+
|
7 |
+
# /!\ This version compatibility check section must be Python 2 compatible. /!\
|
8 |
+
|
9 |
+
import sys
|
10 |
+
|
11 |
+
# Copied from pyproject.toml
|
12 |
+
PYTHON_REQUIRES = (3, 8)
|
13 |
+
|
14 |
+
|
15 |
+
def version_str(version): # type: ignore
|
16 |
+
return ".".join(str(v) for v in version)
|
17 |
+
|
18 |
+
|
19 |
+
if sys.version_info[:2] < PYTHON_REQUIRES:
|
20 |
+
raise SystemExit(
|
21 |
+
"This version of pip does not support python {} (requires >={}).".format(
|
22 |
+
version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
|
23 |
+
)
|
24 |
+
)
|
25 |
+
|
26 |
+
# From here on, we can use Python 3 features, but the syntax must remain
|
27 |
+
# Python 2 compatible.
|
28 |
+
|
29 |
+
import runpy # noqa: E402
|
30 |
+
from importlib.machinery import PathFinder # noqa: E402
|
31 |
+
from os.path import dirname # noqa: E402
|
32 |
+
|
33 |
+
PIP_SOURCES_ROOT = dirname(dirname(__file__))
|
34 |
+
|
35 |
+
|
36 |
+
class PipImportRedirectingFinder:
|
37 |
+
@classmethod
|
38 |
+
def find_spec(self, fullname, path=None, target=None): # type: ignore
|
39 |
+
if fullname != "pip":
|
40 |
+
return None
|
41 |
+
|
42 |
+
spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
|
43 |
+
assert spec, (PIP_SOURCES_ROOT, fullname)
|
44 |
+
return spec
|
45 |
+
|
46 |
+
|
47 |
+
sys.meta_path.insert(0, PipImportRedirectingFinder())
|
48 |
+
|
49 |
+
assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
|
50 |
+
runpy.run_module("pip", run_name="__main__", alter_sys=True)
|
meow/lib/python3.13/site-packages/pip/py.typed
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
pip is a command line program. While it is implemented in Python, and so is
|
2 |
+
available for import, you must not use pip's internal APIs in this way. Typing
|
3 |
+
information is provided as a convenience only and is not a guarantee. Expect
|
4 |
+
unannounced changes to the API and types in releases.
|
meow/lib/python3.13/site-packages/requests/__init__.py
ADDED
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# __
|
2 |
+
# /__) _ _ _ _ _/ _
|
3 |
+
# / ( (- (/ (/ (- _) / _)
|
4 |
+
# /
|
5 |
+
|
6 |
+
"""
|
7 |
+
Requests HTTP Library
|
8 |
+
~~~~~~~~~~~~~~~~~~~~~
|
9 |
+
|
10 |
+
Requests is an HTTP library, written in Python, for human beings.
|
11 |
+
Basic GET usage:
|
12 |
+
|
13 |
+
>>> import requests
|
14 |
+
>>> r = requests.get('https://www.python.org')
|
15 |
+
>>> r.status_code
|
16 |
+
200
|
17 |
+
>>> b'Python is a programming language' in r.content
|
18 |
+
True
|
19 |
+
|
20 |
+
... or POST:
|
21 |
+
|
22 |
+
>>> payload = dict(key1='value1', key2='value2')
|
23 |
+
>>> r = requests.post('https://httpbin.org/post', data=payload)
|
24 |
+
>>> print(r.text)
|
25 |
+
{
|
26 |
+
...
|
27 |
+
"form": {
|
28 |
+
"key1": "value1",
|
29 |
+
"key2": "value2"
|
30 |
+
},
|
31 |
+
...
|
32 |
+
}
|
33 |
+
|
34 |
+
The other HTTP methods are supported - see `requests.api`. Full documentation
|
35 |
+
is at <https://requests.readthedocs.io>.
|
36 |
+
|
37 |
+
:copyright: (c) 2017 by Kenneth Reitz.
|
38 |
+
:license: Apache 2.0, see LICENSE for more details.
|
39 |
+
"""
|
40 |
+
|
41 |
+
import warnings
|
42 |
+
|
43 |
+
import urllib3
|
44 |
+
|
45 |
+
from .exceptions import RequestsDependencyWarning
|
46 |
+
|
47 |
+
try:
|
48 |
+
from charset_normalizer import __version__ as charset_normalizer_version
|
49 |
+
except ImportError:
|
50 |
+
charset_normalizer_version = None
|
51 |
+
|
52 |
+
try:
|
53 |
+
from chardet import __version__ as chardet_version
|
54 |
+
except ImportError:
|
55 |
+
chardet_version = None
|
56 |
+
|
57 |
+
|
58 |
+
def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
|
59 |
+
urllib3_version = urllib3_version.split(".")
|
60 |
+
assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git.
|
61 |
+
|
62 |
+
# Sometimes, urllib3 only reports its version as 16.1.
|
63 |
+
if len(urllib3_version) == 2:
|
64 |
+
urllib3_version.append("0")
|
65 |
+
|
66 |
+
# Check urllib3 for compatibility.
|
67 |
+
major, minor, patch = urllib3_version # noqa: F811
|
68 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
69 |
+
# urllib3 >= 1.21.1
|
70 |
+
assert major >= 1
|
71 |
+
if major == 1:
|
72 |
+
assert minor >= 21
|
73 |
+
|
74 |
+
# Check charset_normalizer for compatibility.
|
75 |
+
if chardet_version:
|
76 |
+
major, minor, patch = chardet_version.split(".")[:3]
|
77 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
78 |
+
# chardet_version >= 3.0.2, < 6.0.0
|
79 |
+
assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
|
80 |
+
elif charset_normalizer_version:
|
81 |
+
major, minor, patch = charset_normalizer_version.split(".")[:3]
|
82 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
83 |
+
# charset_normalizer >= 2.0.0 < 4.0.0
|
84 |
+
assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
|
85 |
+
else:
|
86 |
+
warnings.warn(
|
87 |
+
"Unable to find acceptable character detection dependency "
|
88 |
+
"(chardet or charset_normalizer).",
|
89 |
+
RequestsDependencyWarning,
|
90 |
+
)
|
91 |
+
|
92 |
+
|
93 |
+
def _check_cryptography(cryptography_version):
|
94 |
+
# cryptography < 1.3.4
|
95 |
+
try:
|
96 |
+
cryptography_version = list(map(int, cryptography_version.split(".")))
|
97 |
+
except ValueError:
|
98 |
+
return
|
99 |
+
|
100 |
+
if cryptography_version < [1, 3, 4]:
|
101 |
+
warning = "Old version of cryptography ({}) may cause slowdown.".format(
|
102 |
+
cryptography_version
|
103 |
+
)
|
104 |
+
warnings.warn(warning, RequestsDependencyWarning)
|
105 |
+
|
106 |
+
|
107 |
+
# Check imported dependencies for compatibility.
|
108 |
+
try:
|
109 |
+
check_compatibility(
|
110 |
+
urllib3.__version__, chardet_version, charset_normalizer_version
|
111 |
+
)
|
112 |
+
except (AssertionError, ValueError):
|
113 |
+
warnings.warn(
|
114 |
+
"urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
|
115 |
+
"version!".format(
|
116 |
+
urllib3.__version__, chardet_version, charset_normalizer_version
|
117 |
+
),
|
118 |
+
RequestsDependencyWarning,
|
119 |
+
)
|
120 |
+
|
121 |
+
# Attempt to enable urllib3's fallback for SNI support
|
122 |
+
# if the standard library doesn't support SNI or the
|
123 |
+
# 'ssl' library isn't available.
|
124 |
+
try:
|
125 |
+
try:
|
126 |
+
import ssl
|
127 |
+
except ImportError:
|
128 |
+
ssl = None
|
129 |
+
|
130 |
+
if not getattr(ssl, "HAS_SNI", False):
|
131 |
+
from urllib3.contrib import pyopenssl
|
132 |
+
|
133 |
+
pyopenssl.inject_into_urllib3()
|
134 |
+
|
135 |
+
# Check cryptography version
|
136 |
+
from cryptography import __version__ as cryptography_version
|
137 |
+
|
138 |
+
_check_cryptography(cryptography_version)
|
139 |
+
except ImportError:
|
140 |
+
pass
|
141 |
+
|
142 |
+
# urllib3's DependencyWarnings should be silenced.
|
143 |
+
from urllib3.exceptions import DependencyWarning
|
144 |
+
|
145 |
+
warnings.simplefilter("ignore", DependencyWarning)
|
146 |
+
|
147 |
+
# Set default logging handler to avoid "No handler found" warnings.
|
148 |
+
import logging
|
149 |
+
from logging import NullHandler
|
150 |
+
|
151 |
+
from . import packages, utils
|
152 |
+
from .__version__ import (
|
153 |
+
__author__,
|
154 |
+
__author_email__,
|
155 |
+
__build__,
|
156 |
+
__cake__,
|
157 |
+
__copyright__,
|
158 |
+
__description__,
|
159 |
+
__license__,
|
160 |
+
__title__,
|
161 |
+
__url__,
|
162 |
+
__version__,
|
163 |
+
)
|
164 |
+
from .api import delete, get, head, options, patch, post, put, request
|
165 |
+
from .exceptions import (
|
166 |
+
ConnectionError,
|
167 |
+
ConnectTimeout,
|
168 |
+
FileModeWarning,
|
169 |
+
HTTPError,
|
170 |
+
JSONDecodeError,
|
171 |
+
ReadTimeout,
|
172 |
+
RequestException,
|
173 |
+
Timeout,
|
174 |
+
TooManyRedirects,
|
175 |
+
URLRequired,
|
176 |
+
)
|
177 |
+
from .models import PreparedRequest, Request, Response
|
178 |
+
from .sessions import Session, session
|
179 |
+
from .status_codes import codes
|
180 |
+
|
181 |
+
logging.getLogger(__name__).addHandler(NullHandler())
|
182 |
+
|
183 |
+
# FileModeWarnings go off per the default.
|
184 |
+
warnings.simplefilter("default", FileModeWarning, append=True)
|
meow/lib/python3.13/site-packages/requests/adapters.py
ADDED
@@ -0,0 +1,719 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
requests.adapters
|
3 |
+
~~~~~~~~~~~~~~~~~
|
4 |
+
|
5 |
+
This module contains the transport adapters that Requests uses to define
|
6 |
+
and maintain connections.
|
7 |
+
"""
|
8 |
+
|
9 |
+
import os.path
|
10 |
+
import socket # noqa: F401
|
11 |
+
import typing
|
12 |
+
import warnings
|
13 |
+
|
14 |
+
from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
|
15 |
+
from urllib3.exceptions import HTTPError as _HTTPError
|
16 |
+
from urllib3.exceptions import InvalidHeader as _InvalidHeader
|
17 |
+
from urllib3.exceptions import (
|
18 |
+
LocationValueError,
|
19 |
+
MaxRetryError,
|
20 |
+
NewConnectionError,
|
21 |
+
ProtocolError,
|
22 |
+
)
|
23 |
+
from urllib3.exceptions import ProxyError as _ProxyError
|
24 |
+
from urllib3.exceptions import ReadTimeoutError, ResponseError
|
25 |
+
from urllib3.exceptions import SSLError as _SSLError
|
26 |
+
from urllib3.poolmanager import PoolManager, proxy_from_url
|
27 |
+
from urllib3.util import Timeout as TimeoutSauce
|
28 |
+
from urllib3.util import parse_url
|
29 |
+
from urllib3.util.retry import Retry
|
30 |
+
from urllib3.util.ssl_ import create_urllib3_context
|
31 |
+
|
32 |
+
from .auth import _basic_auth_str
|
33 |
+
from .compat import basestring, urlparse
|
34 |
+
from .cookies import extract_cookies_to_jar
|
35 |
+
from .exceptions import (
|
36 |
+
ConnectionError,
|
37 |
+
ConnectTimeout,
|
38 |
+
InvalidHeader,
|
39 |
+
InvalidProxyURL,
|
40 |
+
InvalidSchema,
|
41 |
+
InvalidURL,
|
42 |
+
ProxyError,
|
43 |
+
ReadTimeout,
|
44 |
+
RetryError,
|
45 |
+
SSLError,
|
46 |
+
)
|
47 |
+
from .models import Response
|
48 |
+
from .structures import CaseInsensitiveDict
|
49 |
+
from .utils import (
|
50 |
+
DEFAULT_CA_BUNDLE_PATH,
|
51 |
+
extract_zipped_paths,
|
52 |
+
get_auth_from_url,
|
53 |
+
get_encoding_from_headers,
|
54 |
+
prepend_scheme_if_needed,
|
55 |
+
select_proxy,
|
56 |
+
urldefragauth,
|
57 |
+
)
|
58 |
+
|
59 |
+
try:
|
60 |
+
from urllib3.contrib.socks import SOCKSProxyManager
|
61 |
+
except ImportError:
|
62 |
+
|
63 |
+
def SOCKSProxyManager(*args, **kwargs):
|
64 |
+
raise InvalidSchema("Missing dependencies for SOCKS support.")
|
65 |
+
|
66 |
+
|
67 |
+
if typing.TYPE_CHECKING:
|
68 |
+
from .models import PreparedRequest
|
69 |
+
|
70 |
+
|
71 |
+
DEFAULT_POOLBLOCK = False
|
72 |
+
DEFAULT_POOLSIZE = 10
|
73 |
+
DEFAULT_RETRIES = 0
|
74 |
+
DEFAULT_POOL_TIMEOUT = None
|
75 |
+
|
76 |
+
|
77 |
+
try:
|
78 |
+
import ssl # noqa: F401
|
79 |
+
|
80 |
+
_preloaded_ssl_context = create_urllib3_context()
|
81 |
+
_preloaded_ssl_context.load_verify_locations(
|
82 |
+
extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
|
83 |
+
)
|
84 |
+
except ImportError:
|
85 |
+
# Bypass default SSLContext creation when Python
|
86 |
+
# interpreter isn't built with the ssl module.
|
87 |
+
_preloaded_ssl_context = None
|
88 |
+
|
89 |
+
|
90 |
+
def _urllib3_request_context(
|
91 |
+
request: "PreparedRequest",
|
92 |
+
verify: "bool | str | None",
|
93 |
+
client_cert: "typing.Tuple[str, str] | str | None",
|
94 |
+
poolmanager: "PoolManager",
|
95 |
+
) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
|
96 |
+
host_params = {}
|
97 |
+
pool_kwargs = {}
|
98 |
+
parsed_request_url = urlparse(request.url)
|
99 |
+
scheme = parsed_request_url.scheme.lower()
|
100 |
+
port = parsed_request_url.port
|
101 |
+
|
102 |
+
# Determine if we have and should use our default SSLContext
|
103 |
+
# to optimize performance on standard requests.
|
104 |
+
poolmanager_kwargs = getattr(poolmanager, "connection_pool_kw", {})
|
105 |
+
has_poolmanager_ssl_context = poolmanager_kwargs.get("ssl_context")
|
106 |
+
should_use_default_ssl_context = (
|
107 |
+
_preloaded_ssl_context is not None and not has_poolmanager_ssl_context
|
108 |
+
)
|
109 |
+
|
110 |
+
cert_reqs = "CERT_REQUIRED"
|
111 |
+
if verify is False:
|
112 |
+
cert_reqs = "CERT_NONE"
|
113 |
+
elif verify is True and should_use_default_ssl_context:
|
114 |
+
pool_kwargs["ssl_context"] = _preloaded_ssl_context
|
115 |
+
elif isinstance(verify, str):
|
116 |
+
if not os.path.isdir(verify):
|
117 |
+
pool_kwargs["ca_certs"] = verify
|
118 |
+
else:
|
119 |
+
pool_kwargs["ca_cert_dir"] = verify
|
120 |
+
pool_kwargs["cert_reqs"] = cert_reqs
|
121 |
+
if client_cert is not None:
|
122 |
+
if isinstance(client_cert, tuple) and len(client_cert) == 2:
|
123 |
+
pool_kwargs["cert_file"] = client_cert[0]
|
124 |
+
pool_kwargs["key_file"] = client_cert[1]
|
125 |
+
else:
|
126 |
+
# According to our docs, we allow users to specify just the client
|
127 |
+
# cert path
|
128 |
+
pool_kwargs["cert_file"] = client_cert
|
129 |
+
host_params = {
|
130 |
+
"scheme": scheme,
|
131 |
+
"host": parsed_request_url.hostname,
|
132 |
+
"port": port,
|
133 |
+
}
|
134 |
+
return host_params, pool_kwargs
|
135 |
+
|
136 |
+
|
137 |
+
class BaseAdapter:
|
138 |
+
"""The Base Transport Adapter"""
|
139 |
+
|
140 |
+
def __init__(self):
|
141 |
+
super().__init__()
|
142 |
+
|
143 |
+
def send(
|
144 |
+
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
|
145 |
+
):
|
146 |
+
"""Sends PreparedRequest object. Returns Response object.
|
147 |
+
|
148 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
149 |
+
:param stream: (optional) Whether to stream the request content.
|
150 |
+
:param timeout: (optional) How long to wait for the server to send
|
151 |
+
data before giving up, as a float, or a :ref:`(connect timeout,
|
152 |
+
read timeout) <timeouts>` tuple.
|
153 |
+
:type timeout: float or tuple
|
154 |
+
:param verify: (optional) Either a boolean, in which case it controls whether we verify
|
155 |
+
the server's TLS certificate, or a string, in which case it must be a path
|
156 |
+
to a CA bundle to use
|
157 |
+
:param cert: (optional) Any user-provided SSL certificate to be trusted.
|
158 |
+
:param proxies: (optional) The proxies dictionary to apply to the request.
|
159 |
+
"""
|
160 |
+
raise NotImplementedError
|
161 |
+
|
162 |
+
def close(self):
|
163 |
+
"""Cleans up adapter specific items."""
|
164 |
+
raise NotImplementedError
|
165 |
+
|
166 |
+
|
167 |
+
class HTTPAdapter(BaseAdapter):
|
168 |
+
"""The built-in HTTP Adapter for urllib3.
|
169 |
+
|
170 |
+
Provides a general-case interface for Requests sessions to contact HTTP and
|
171 |
+
HTTPS urls by implementing the Transport Adapter interface. This class will
|
172 |
+
usually be created by the :class:`Session <Session>` class under the
|
173 |
+
covers.
|
174 |
+
|
175 |
+
:param pool_connections: The number of urllib3 connection pools to cache.
|
176 |
+
:param pool_maxsize: The maximum number of connections to save in the pool.
|
177 |
+
:param max_retries: The maximum number of retries each connection
|
178 |
+
should attempt. Note, this applies only to failed DNS lookups, socket
|
179 |
+
connections and connection timeouts, never to requests where data has
|
180 |
+
made it to the server. By default, Requests does not retry failed
|
181 |
+
connections. If you need granular control over the conditions under
|
182 |
+
which we retry a request, import urllib3's ``Retry`` class and pass
|
183 |
+
that instead.
|
184 |
+
:param pool_block: Whether the connection pool should block for connections.
|
185 |
+
|
186 |
+
Usage::
|
187 |
+
|
188 |
+
>>> import requests
|
189 |
+
>>> s = requests.Session()
|
190 |
+
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
|
191 |
+
>>> s.mount('http://', a)
|
192 |
+
"""
|
193 |
+
|
194 |
+
__attrs__ = [
|
195 |
+
"max_retries",
|
196 |
+
"config",
|
197 |
+
"_pool_connections",
|
198 |
+
"_pool_maxsize",
|
199 |
+
"_pool_block",
|
200 |
+
]
|
201 |
+
|
202 |
+
def __init__(
|
203 |
+
self,
|
204 |
+
pool_connections=DEFAULT_POOLSIZE,
|
205 |
+
pool_maxsize=DEFAULT_POOLSIZE,
|
206 |
+
max_retries=DEFAULT_RETRIES,
|
207 |
+
pool_block=DEFAULT_POOLBLOCK,
|
208 |
+
):
|
209 |
+
if max_retries == DEFAULT_RETRIES:
|
210 |
+
self.max_retries = Retry(0, read=False)
|
211 |
+
else:
|
212 |
+
self.max_retries = Retry.from_int(max_retries)
|
213 |
+
self.config = {}
|
214 |
+
self.proxy_manager = {}
|
215 |
+
|
216 |
+
super().__init__()
|
217 |
+
|
218 |
+
self._pool_connections = pool_connections
|
219 |
+
self._pool_maxsize = pool_maxsize
|
220 |
+
self._pool_block = pool_block
|
221 |
+
|
222 |
+
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
|
223 |
+
|
224 |
+
def __getstate__(self):
|
225 |
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
226 |
+
|
227 |
+
def __setstate__(self, state):
|
228 |
+
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
|
229 |
+
# self.poolmanager uses a lambda function, which isn't pickleable.
|
230 |
+
self.proxy_manager = {}
|
231 |
+
self.config = {}
|
232 |
+
|
233 |
+
for attr, value in state.items():
|
234 |
+
setattr(self, attr, value)
|
235 |
+
|
236 |
+
self.init_poolmanager(
|
237 |
+
self._pool_connections, self._pool_maxsize, block=self._pool_block
|
238 |
+
)
|
239 |
+
|
240 |
+
def init_poolmanager(
|
241 |
+
self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
|
242 |
+
):
|
243 |
+
"""Initializes a urllib3 PoolManager.
|
244 |
+
|
245 |
+
This method should not be called from user code, and is only
|
246 |
+
exposed for use when subclassing the
|
247 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
248 |
+
|
249 |
+
:param connections: The number of urllib3 connection pools to cache.
|
250 |
+
:param maxsize: The maximum number of connections to save in the pool.
|
251 |
+
:param block: Block when no free connections are available.
|
252 |
+
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
|
253 |
+
"""
|
254 |
+
# save these values for pickling
|
255 |
+
self._pool_connections = connections
|
256 |
+
self._pool_maxsize = maxsize
|
257 |
+
self._pool_block = block
|
258 |
+
|
259 |
+
self.poolmanager = PoolManager(
|
260 |
+
num_pools=connections,
|
261 |
+
maxsize=maxsize,
|
262 |
+
block=block,
|
263 |
+
**pool_kwargs,
|
264 |
+
)
|
265 |
+
|
266 |
+
def proxy_manager_for(self, proxy, **proxy_kwargs):
|
267 |
+
"""Return urllib3 ProxyManager for the given proxy.
|
268 |
+
|
269 |
+
This method should not be called from user code, and is only
|
270 |
+
exposed for use when subclassing the
|
271 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
272 |
+
|
273 |
+
:param proxy: The proxy to return a urllib3 ProxyManager for.
|
274 |
+
:param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
|
275 |
+
:returns: ProxyManager
|
276 |
+
:rtype: urllib3.ProxyManager
|
277 |
+
"""
|
278 |
+
if proxy in self.proxy_manager:
|
279 |
+
manager = self.proxy_manager[proxy]
|
280 |
+
elif proxy.lower().startswith("socks"):
|
281 |
+
username, password = get_auth_from_url(proxy)
|
282 |
+
manager = self.proxy_manager[proxy] = SOCKSProxyManager(
|
283 |
+
proxy,
|
284 |
+
username=username,
|
285 |
+
password=password,
|
286 |
+
num_pools=self._pool_connections,
|
287 |
+
maxsize=self._pool_maxsize,
|
288 |
+
block=self._pool_block,
|
289 |
+
**proxy_kwargs,
|
290 |
+
)
|
291 |
+
else:
|
292 |
+
proxy_headers = self.proxy_headers(proxy)
|
293 |
+
manager = self.proxy_manager[proxy] = proxy_from_url(
|
294 |
+
proxy,
|
295 |
+
proxy_headers=proxy_headers,
|
296 |
+
num_pools=self._pool_connections,
|
297 |
+
maxsize=self._pool_maxsize,
|
298 |
+
block=self._pool_block,
|
299 |
+
**proxy_kwargs,
|
300 |
+
)
|
301 |
+
|
302 |
+
return manager
|
303 |
+
|
304 |
+
    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        # Verification only applies to HTTPS; any truthy `verify` enables it.
        if url.lower().startswith("https") and verify:
            conn.cert_reqs = "CERT_REQUIRED"

            # Only load the CA certificates if 'verify' is a string indicating the CA bundle to use.
            # Otherwise, if verify is a boolean, we don't load anything since
            # the connection will be using a context with the default certificates already loaded,
            # and this avoids a call to the slow load_verify_locations()
            if verify is not True:
                # `verify` must be a str with a path then
                cert_loc = verify

                if not os.path.exists(cert_loc):
                    raise OSError(
                        f"Could not find a suitable TLS CA certificate bundle, "
                        f"invalid path: {cert_loc}"
                    )

                # A file is a CA bundle; a directory is a hashed CA cert dir.
                if not os.path.isdir(cert_loc):
                    conn.ca_certs = cert_loc
                else:
                    conn.ca_cert_dir = cert_loc
        else:
            # HTTP, or verification explicitly disabled: clear all CA settings.
            conn.cert_reqs = "CERT_NONE"
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # Client certificate (mTLS): a (cert, key) tuple or a single
            # combined-file path.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise OSError(
                    f"Could not find the TLS certificate file, "
                    f"invalid path: {conn.cert_file}"
                )
            if conn.key_file and not os.path.exists(conn.key_file):
                raise OSError(
                    f"Could not find the TLS key file, invalid path: {conn.key_file}"
                )
|
358 |
+
|
359 |
+
def build_response(self, req, resp):
|
360 |
+
"""Builds a :class:`Response <requests.Response>` object from a urllib3
|
361 |
+
response. This should not be called from user code, and is only exposed
|
362 |
+
for use when subclassing the
|
363 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
|
364 |
+
|
365 |
+
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
|
366 |
+
:param resp: The urllib3 response object.
|
367 |
+
:rtype: requests.Response
|
368 |
+
"""
|
369 |
+
response = Response()
|
370 |
+
|
371 |
+
# Fallback to None if there's no status_code, for whatever reason.
|
372 |
+
response.status_code = getattr(resp, "status", None)
|
373 |
+
|
374 |
+
# Make headers case-insensitive.
|
375 |
+
response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
|
376 |
+
|
377 |
+
# Set encoding.
|
378 |
+
response.encoding = get_encoding_from_headers(response.headers)
|
379 |
+
response.raw = resp
|
380 |
+
response.reason = response.raw.reason
|
381 |
+
|
382 |
+
if isinstance(req.url, bytes):
|
383 |
+
response.url = req.url.decode("utf-8")
|
384 |
+
else:
|
385 |
+
response.url = req.url
|
386 |
+
|
387 |
+
# Add new cookies from the server.
|
388 |
+
extract_cookies_to_jar(response.cookies, req, resp)
|
389 |
+
|
390 |
+
# Give the Response some context.
|
391 |
+
response.request = req
|
392 |
+
response.connection = self
|
393 |
+
|
394 |
+
return response
|
395 |
+
|
396 |
+
def build_connection_pool_key_attributes(self, request, verify, cert=None):
|
397 |
+
"""Build the PoolKey attributes used by urllib3 to return a connection.
|
398 |
+
|
399 |
+
This looks at the PreparedRequest, the user-specified verify value,
|
400 |
+
and the value of the cert parameter to determine what PoolKey values
|
401 |
+
to use to select a connection from a given urllib3 Connection Pool.
|
402 |
+
|
403 |
+
The SSL related pool key arguments are not consistently set. As of
|
404 |
+
this writing, use the following to determine what keys may be in that
|
405 |
+
dictionary:
|
406 |
+
|
407 |
+
* If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
|
408 |
+
default Requests SSL Context
|
409 |
+
* If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
|
410 |
+
``"cert_reqs"`` will be set
|
411 |
+
* If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
|
412 |
+
``"ca_certs"`` will be set if the string is not a directory recognized
|
413 |
+
by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` will be
|
414 |
+
set.
|
415 |
+
* If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
|
416 |
+
``"cert"`` is a tuple with a second item, ``"key_file"`` will also
|
417 |
+
be present
|
418 |
+
|
419 |
+
To override these settings, one may subclass this class, call this
|
420 |
+
method and use the above logic to change parameters as desired. For
|
421 |
+
example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
|
422 |
+
must both set ``"ssl_context"`` and based on what else they require,
|
423 |
+
alter the other keys to ensure the desired behaviour.
|
424 |
+
|
425 |
+
:param request:
|
426 |
+
The PreparedReqest being sent over the connection.
|
427 |
+
:type request:
|
428 |
+
:class:`~requests.models.PreparedRequest`
|
429 |
+
:param verify:
|
430 |
+
Either a boolean, in which case it controls whether
|
431 |
+
we verify the server's TLS certificate, or a string, in which case it
|
432 |
+
must be a path to a CA bundle to use.
|
433 |
+
:param cert:
|
434 |
+
(optional) Any user-provided SSL certificate for client
|
435 |
+
authentication (a.k.a., mTLS). This may be a string (i.e., just
|
436 |
+
the path to a file which holds both certificate and key) or a
|
437 |
+
tuple of length 2 with the certificate file path and key file
|
438 |
+
path.
|
439 |
+
:returns:
|
440 |
+
A tuple of two dictionaries. The first is the "host parameters"
|
441 |
+
portion of the Pool Key including scheme, hostname, and port. The
|
442 |
+
second is a dictionary of SSLContext related parameters.
|
443 |
+
"""
|
444 |
+
return _urllib3_request_context(request, verify, cert, self.poolmanager)
|
445 |
+
|
446 |
+
    def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
        """Returns a urllib3 connection for the given request and TLS settings.
        This should not be called from user code, and is only exposed for use
        when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request:
            The :class:`PreparedRequest <PreparedRequest>` object to be sent
            over the connection.
        :param verify:
            Either a boolean, in which case it controls whether we verify the
            server's TLS certificate, or a string, in which case it must be a
            path to a CA bundle to use.
        :param proxies:
            (optional) The proxies dictionary to apply to the request.
        :param cert:
            (optional) Any user-provided SSL certificate to be used for client
            authentication (a.k.a., mTLS).
        :rtype:
            urllib3.ConnectionPool
        """
        proxy = select_proxy(request.url, proxies)
        try:
            # Translate request/verify/cert into urllib3 PoolKey parameters.
            host_params, pool_kwargs = self.build_connection_pool_key_attributes(
                request,
                verify,
                cert,
            )
        except ValueError as e:
            # An unparsable URL surfaces to the caller as InvalidURL.
            raise InvalidURL(e, request=request)
        if proxy:
            proxy = prepend_scheme_if_needed(proxy, "http")
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL(
                    "Please check proxy URL. It is malformed "
                    "and could be missing the host."
                )
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_host(
                **host_params, pool_kwargs=pool_kwargs
            )
        else:
            # Only scheme should be lower case
            conn = self.poolmanager.connection_from_host(
                **host_params, pool_kwargs=pool_kwargs
            )

        return conn
|
494 |
+
|
495 |
+
    def get_connection(self, url, proxies=None):
        """DEPRECATED: Users should move to `get_connection_with_tls_context`
        for all subclasses of HTTPAdapter using Requests>=2.32.2.

        Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        # Kept only for backwards compatibility; warn every time it is used.
        warnings.warn(
            (
                "`get_connection` has been deprecated in favor of "
                "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
                "will need to migrate for Requests>=2.32.2. Please see "
                "https://github.com/psf/requests/pull/6710 for more details."
            ),
            DeprecationWarning,
        )
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, "http")
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL(
                    "Please check proxy URL. It is malformed "
                    "and could be missing the host."
                )
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn
|
535 |
+
|
536 |
+
def close(self):
|
537 |
+
"""Disposes of any internal state.
|
538 |
+
|
539 |
+
Currently, this closes the PoolManager and any active ProxyManager,
|
540 |
+
which closes any pooled connections.
|
541 |
+
"""
|
542 |
+
self.poolmanager.clear()
|
543 |
+
for proxy in self.proxy_manager.values():
|
544 |
+
proxy.clear()
|
545 |
+
|
546 |
+
    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        # HTTPS-through-proxy tunnels via CONNECT, so the path form still
        # applies there; only plain-HTTP proxying needs the absolute URL.
        is_proxied_http_request = proxy and scheme != "https"
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith("socks")

        url = request.path_url
        if url.startswith("//"):  # Don't confuse urllib3
            url = f"/{url.lstrip('/')}"

        # SOCKS proxies behave like direct connections for URL purposes.
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url
|
577 |
+
|
578 |
+
def add_headers(self, request, **kwargs):
|
579 |
+
"""Add any headers needed by the connection. As of v2.0 this does
|
580 |
+
nothing by default, but is left for overriding by users that subclass
|
581 |
+
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
582 |
+
|
583 |
+
This should not be called from user code, and is only exposed for use
|
584 |
+
when subclassing the
|
585 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
586 |
+
|
587 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
|
588 |
+
:param kwargs: The keyword arguments from the call to send().
|
589 |
+
"""
|
590 |
+
pass
|
591 |
+
|
592 |
+
def proxy_headers(self, proxy):
|
593 |
+
"""Returns a dictionary of the headers to add to any request sent
|
594 |
+
through a proxy. This works with urllib3 magic to ensure that they are
|
595 |
+
correctly sent to the proxy, rather than in a tunnelled request if
|
596 |
+
CONNECT is being used.
|
597 |
+
|
598 |
+
This should not be called from user code, and is only exposed for use
|
599 |
+
when subclassing the
|
600 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
601 |
+
|
602 |
+
:param proxy: The url of the proxy being used for this request.
|
603 |
+
:rtype: dict
|
604 |
+
"""
|
605 |
+
headers = {}
|
606 |
+
username, password = get_auth_from_url(proxy)
|
607 |
+
|
608 |
+
if username:
|
609 |
+
headers["Proxy-Authorization"] = _basic_auth_str(username, password)
|
610 |
+
|
611 |
+
return headers
|
612 |
+
|
613 |
+
    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        # Pick (or create) the connection pool for this URL + TLS settings.
        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        # Hook for subclasses; the default implementation is a no-op.
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )

        # Use chunked transfer when there is a body but no Content-Length.
        chunked = not (request.body is None or "Content-Length" in request.headers)

        # Normalize `timeout` into a urllib3 Timeout object.
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )

        # Map urllib3 / socket-level failures onto requests' exception types.
        except (ProtocolError, OSError) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            # Any other retry-exhaustion reason becomes a ConnectionError.
            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            elif isinstance(e, _InvalidHeader):
                raise InvalidHeader(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
|
meow/lib/python3.13/site-packages/requests/api.py
ADDED
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
requests.api
|
3 |
+
~~~~~~~~~~~~
|
4 |
+
|
5 |
+
This module implements the Requests API.
|
6 |
+
|
7 |
+
:copyright: (c) 2012 by Kenneth Reitz.
|
8 |
+
:license: Apache2, see LICENSE for more details.
|
9 |
+
"""
|
10 |
+
|
11 |
+
from . import sessions
|
12 |
+
|
13 |
+
|
14 |
+
def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'https://httpbin.org/get')
      >>> req
      <Response [200]>
    """

    # A throwaway Session scoped by `with` guarantees the underlying
    # sockets are closed, avoiding ResourceWarnings and apparent leaks.
    with sessions.Session() as session:
        response = session.request(method=method, url=url, **kwargs)
    return response
|
60 |
+
|
61 |
+
|
62 |
+
def get(url, params=None, **kwargs):
    r"""Send a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("get", url, params=params, **kwargs)
|
74 |
+
|
75 |
+
|
76 |
+
def options(url, **kwargs):
    r"""Send an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("options", url, **kwargs)
|
86 |
+
|
87 |
+
|
88 |
+
def head(url, **kwargs):
    r"""Send a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes. If
        `allow_redirects` is not provided, it will be set to `False` (as
        opposed to the default :meth:`request` behavior).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # HEAD defaults to NOT following redirects, unlike the other verbs.
    kwargs.setdefault("allow_redirects", False)
    return request("head", url, **kwargs)
|
101 |
+
|
102 |
+
|
103 |
+
def post(url, data=None, json=None, **kwargs):
    r"""Send a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("post", url, data=data, json=json, **kwargs)
|
116 |
+
|
117 |
+
|
118 |
+
def put(url, data=None, **kwargs):
    r"""Send a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("put", url, data=data, **kwargs)
|
131 |
+
|
132 |
+
|
133 |
+
def patch(url, data=None, **kwargs):
    r"""Send a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("patch", url, data=data, **kwargs)
|
146 |
+
|
147 |
+
|
148 |
+
def delete(url, **kwargs):
    r"""Send a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("delete", url, **kwargs)
|
meow/lib/python3.13/site-packages/requests/auth.py
ADDED
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
requests.auth
|
3 |
+
~~~~~~~~~~~~~
|
4 |
+
|
5 |
+
This module contains the authentication handlers for Requests.
|
6 |
+
"""
|
7 |
+
|
8 |
+
import hashlib
|
9 |
+
import os
|
10 |
+
import re
|
11 |
+
import threading
|
12 |
+
import time
|
13 |
+
import warnings
|
14 |
+
from base64 import b64encode
|
15 |
+
|
16 |
+
from ._internal_utils import to_native_string
|
17 |
+
from .compat import basestring, str, urlparse
|
18 |
+
from .cookies import extract_cookies_to_jar
|
19 |
+
from .utils import parse_dict_header
|
20 |
+
|
21 |
+
# MIME types used when deciding how a request body was encoded.
CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
CONTENT_TYPE_MULTI_PART = "multipart/form-data"
|
23 |
+
|
24 |
+
|
25 |
+
def _basic_auth_str(username, password):
    """Returns a Basic Auth string.

    Non-string inputs are coerced with ``str()`` for backwards
    compatibility (with a DeprecationWarning); the credentials are then
    latin1-encoded and base64'd per the Basic scheme.
    """

    # "I want us to put a big-ol' comment on top of it that
    # says that this behaviour is dumb but we need to preserve
    # it because people are relying on it."
    # - Lukasa
    #
    # These are here solely to maintain backwards compatibility
    # for things like ints. This will be removed in 3.0.0.
    if not isinstance(username, basestring):
        warnings.warn(
            "Non-string usernames will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(username),
            category=DeprecationWarning,
        )
        username = str(username)

    if not isinstance(password, basestring):
        # NOTE(review): unlike the username branch, this interpolates
        # type(password) rather than the value — presumably to avoid
        # leaking the secret into warning output; confirm before changing.
        warnings.warn(
            "Non-string passwords will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(type(password)),
            category=DeprecationWarning,
        )
        password = str(password)
    # -- End Removal --

    if isinstance(username, str):
        username = username.encode("latin1")

    if isinstance(password, str):
        password = password.encode("latin1")

    authstr = "Basic " + to_native_string(
        b64encode(b":".join((username, password))).strip()
    )

    return authstr
|
67 |
+
|
68 |
+
|
69 |
+
class AuthBase:
    """Base class from which all auth implementations derive.

    Subclasses must implement ``__call__`` to mutate and return the
    outgoing request.
    """

    def __call__(self, r):
        raise NotImplementedError("Auth hooks must be callable.")
|
74 |
+
|
75 |
+
|
76 |
+
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object.

    Stores the credential pair and injects an ``Authorization`` header
    into every request it is called with.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        other_username = getattr(other, "username", None)
        other_password = getattr(other, "password", None)
        return self.username == other_username and self.password == other_password

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        header_value = _basic_auth_str(self.username, self.password)
        r.headers["Authorization"] = header_value
        return r
|
97 |
+
|
98 |
+
|
99 |
+
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object.

    Identical to :class:`HTTPBasicAuth` except the credentials are sent
    in the ``Proxy-Authorization`` header instead of ``Authorization``.
    """

    def __call__(self, r):
        header_value = _basic_auth_str(self.username, self.password)
        r.headers["Proxy-Authorization"] = header_value
        return r
|
105 |
+
|
106 |
+
|
107 |
+
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object."""

    def __init__(self, username, password):
        """Store credentials and set up per-thread challenge state.

        :param username: user name used in the digest exchange.
        :param password: password used in the digest exchange.
        """
        self.username = username
        self.password = password
        # Keep state in per-thread local storage
        self._thread_local = threading.local()

    def init_per_thread_state(self):
        """Lazily create this thread's challenge/nonce bookkeeping."""
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, "init"):
            self._thread_local.init = True
            self._thread_local.last_nonce = ""
            self._thread_local.nonce_count = 0
            self._thread_local.chal = {}
            self._thread_local.pos = None
            self._thread_local.num_401_calls = None

    def build_digest_header(self, method, url):
        """Build the value for the ``Authorization: Digest ...`` header.

        Uses the challenge stored in ``self._thread_local.chal``. Returns
        ``None`` for unsupported algorithms or qop values.

        :rtype: str
        """

        realm = self._thread_local.chal["realm"]
        nonce = self._thread_local.chal["nonce"]
        qop = self._thread_local.chal.get("qop")
        algorithm = self._thread_local.chal.get("algorithm")
        opaque = self._thread_local.chal.get("opaque")
        hash_utf8 = None

        # Default to MD5 when the server did not name an algorithm.
        if algorithm is None:
            _algorithm = "MD5"
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == "MD5" or _algorithm == "MD5-SESS":

            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.md5(x).hexdigest()

            hash_utf8 = md5_utf8
        elif _algorithm == "SHA":

            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha1(x).hexdigest()

            hash_utf8 = sha_utf8
        elif _algorithm == "SHA-256":

            def sha256_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha256(x).hexdigest()

            hash_utf8 = sha256_utf8
        elif _algorithm == "SHA-512":

            def sha512_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha512(x).hexdigest()

            hash_utf8 = sha512_utf8

        # KD("secret", "data") digests "secret:data" per the digest scheme.
        KD = lambda s, d: hash_utf8(f"{s}:{d}")  # noqa:E731

        # Unknown algorithm: signal "cannot authenticate" to the caller.
        if hash_utf8 is None:
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += f"?{p_parsed.query}"

        A1 = f"{self.username}:{realm}:{self.password}"
        A2 = f"{method}:{path}"

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        # Track how many times this server nonce has been used.
        if nonce == self._thread_local.last_nonce:
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = f"{self._thread_local.nonce_count:08x}"
        # Client nonce: mix the counter, server nonce, wall-clock time and
        # random bytes, then keep the first 16 hex chars of the SHA-1.
        s = str(self._thread_local.nonce_count).encode("utf-8")
        s += nonce.encode("utf-8")
        s += time.ctime().encode("utf-8")
        s += os.urandom(8)

        cnonce = hashlib.sha1(s).hexdigest()[:16]
        if _algorithm == "MD5-SESS":
            HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")

        if not qop:
            respdig = KD(HA1, f"{nonce}:{HA2}")
        elif qop == "auth" or "auth" in qop.split(","):
            noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self._thread_local.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = (
            f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
            f'uri="{path}", response="{respdig}"'
        )
        if opaque:
            base += f', opaque="{opaque}"'
        if algorithm:
            base += f', algorithm="{algorithm}"'
        if entdig:
            base += f', digest="{entdig}"'
        if qop:
            base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'

        return f"Digest {base}"

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self._thread_local.num_401_calls = 1

    def handle_401(self, r, **kwargs):
        """
        Takes the given response and tries digest-auth, if needed.

        :rtype: requests.Response
        """

        # If response is not 4xx, do not auth
        # See https://github.com/psf/requests/issues/3772
        if not 400 <= r.status_code < 500:
            self._thread_local.num_401_calls = 1
            return r

        if self._thread_local.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get("www-authenticate", "")

        # Only retry once per challenge (num_401_calls < 2 guards loops).
        if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
            self._thread_local.num_401_calls += 1
            pat = re.compile(r"digest ", flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers["Authorization"] = self.build_digest_header(
                prep.method, prep.url
            )
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        self._thread_local.num_401_calls = 1
        return r

    def __call__(self, r):
        # Initialize per-thread state, if needed
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
        try:
            self._thread_local.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook("response", self.handle_401)
        r.register_hook("response", self.handle_redirect)
        self._thread_local.num_401_calls = 1

        return r

    def __eq__(self, other):
        return all(
            [
                self.username == getattr(other, "username", None),
                self.password == getattr(other, "password", None),
            ]
        )

    def __ne__(self, other):
        return not self == other
|
meow/lib/python3.13/site-packages/requests/certs.py
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python

"""
requests.certs
~~~~~~~~~~~~~~

This module returns the preferred default CA certificate bundle. There is
only one — the one from the certifi package.

If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""

# Re-exported so callers can use ``requests.certs.where()``.
from certifi import where

if __name__ == "__main__":
    # Running the module directly prints the bundle path for inspection.
    print(where())
|
meow/lib/python3.13/site-packages/requests/compat.py
ADDED
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
requests.compat
|
3 |
+
~~~~~~~~~~~~~~~
|
4 |
+
|
5 |
+
This module previously handled import compatibility issues
|
6 |
+
between Python 2 and Python 3. It remains for backwards
|
7 |
+
compatibility until the next major version.
|
8 |
+
"""
|
9 |
+
|
10 |
+
import importlib
|
11 |
+
import sys
|
12 |
+
|
13 |
+
# -------------------
|
14 |
+
# Character Detection
|
15 |
+
# -------------------
|
16 |
+
|
17 |
+
|
18 |
+
def _resolve_char_detection():
|
19 |
+
"""Find supported character detection libraries."""
|
20 |
+
chardet = None
|
21 |
+
for lib in ("chardet", "charset_normalizer"):
|
22 |
+
if chardet is None:
|
23 |
+
try:
|
24 |
+
chardet = importlib.import_module(lib)
|
25 |
+
except ImportError:
|
26 |
+
pass
|
27 |
+
return chardet
|
28 |
+
|
29 |
+
|
30 |
+
# Resolved detection module (chardet preferred, else charset_normalizer),
# or None when neither optional dependency is installed.
chardet = _resolve_char_detection()

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = _ver[0] == 2

#: Python 3.x?
is_py3 = _ver[0] == 3

# json/simplejson module import resolution
# Prefer simplejson when available; fall back to the stdlib json module.
has_simplejson = False
try:
    import simplejson as json

    has_simplejson = True
except ImportError:
    import json

# Expose a single JSONDecodeError name matching whichever json was imported.
if has_simplejson:
    from simplejson import JSONDecodeError
else:
    from json import JSONDecodeError

# Keep OrderedDict for backwards compatibility.
from collections import OrderedDict
from collections.abc import Callable, Mapping, MutableMapping
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO

# --------------
# Legacy Imports
# --------------
from urllib.parse import (
    quote,
    quote_plus,
    unquote,
    unquote_plus,
    urldefrag,
    urlencode,
    urljoin,
    urlparse,
    urlsplit,
    urlunparse,
)
from urllib.request import (
    getproxies,
    getproxies_environment,
    parse_http_list,
    proxy_bypass,
    proxy_bypass_environment,
)

# Python 2 compatibility aliases, kept so old client code keeps importing.
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
integer_types = (int,)
|
meow/lib/python3.13/site-packages/requests/cookies.py
ADDED
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
requests.cookies
|
3 |
+
~~~~~~~~~~~~~~~~
|
4 |
+
|
5 |
+
Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
|
6 |
+
|
7 |
+
requests.utils imports from here, so be careful with imports.
|
8 |
+
"""
|
9 |
+
|
10 |
+
import calendar
|
11 |
+
import copy
|
12 |
+
import time
|
13 |
+
|
14 |
+
from ._internal_utils import to_native_string
|
15 |
+
from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
|
16 |
+
|
17 |
+
try:
|
18 |
+
import threading
|
19 |
+
except ImportError:
|
20 |
+
import dummy_threading as threading
|
21 |
+
|
22 |
+
|
23 |
+
class MockRequest:
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `http.cookiejar.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._request = request
        self._extra_headers = {}
        self.type = urlparse(self._request.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._request.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Honor a user-supplied Host header: rebuild the URL around it so
        # that cookie domain checks match what will actually be sent.
        host_header = self._request.headers.get("Host")
        if not host_header:
            return self._request.url
        host = to_native_string(host_header, encoding="utf-8")
        parsed = urlparse(self._request.url)
        return urlunparse(
            (
                parsed.scheme,
                host,
                parsed.path,
                parsed.params,
                parsed.query,
                parsed.fragment,
            )
        )

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._request.headers or name in self._extra_headers

    def get_header(self, name, default=None):
        return self._request.headers.get(name, self._extra_headers.get(name, default))

    def add_header(self, key, val):
        """cookiejar has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError(
            "Cookie headers should be added with add_unredirected_header()"
        )

    def add_unredirected_header(self, name, value):
        self._extra_headers[name] = value

    def get_new_headers(self):
        return self._extra_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
|
101 |
+
|
102 |
+
|
103 |
+
class MockResponse:
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `http.cookiejar` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookiejar` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        """Return the underlying header container, as cookiejar expects."""
        return self._headers

    def getheaders(self, name):
        """Return all values for header *name*.

        Fix: the result of ``getheaders`` was previously computed and
        discarded, so this method always returned ``None``; it now
        returns the header values.
        """
        return self._headers.getheaders(name)
|
122 |
+
|
123 |
+
|
124 |
+
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # The wrapped httplib.HTTPResponse carries the raw headers; without it
    # there is nothing to extract.
    original_response = getattr(response, "_original_response", None)
    if not original_response:
        return
    mock_request = MockRequest(request)
    # Hand the HTTPMessage holding the headers to the mock response wrapper.
    mock_response = MockResponse(original_response.msg)
    jar.extract_cookies(mock_response, mock_request)
|
138 |
+
|
139 |
+
|
140 |
+
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    mock_request = MockRequest(request)
    # The jar writes the Cookie header into the mock via
    # add_unredirected_header(); read it back out.
    jar.add_cookie_header(mock_request)
    return mock_request.get_new_headers().get("Cookie")
|
149 |
+
|
150 |
+
|
151 |
+
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: clearing while iterating would mutate the jar
    # under the iterator.
    matches = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]

    for cookie_domain, cookie_path, cookie_name in matches:
        cookiejar.clear(cookie_domain, cookie_path, cookie_name)
|
168 |
+
|
169 |
+
|
170 |
+
class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.

    Use .get and .set and include domain and path args in order to be more specific.

    Raised by ``RequestsCookieJar._find_no_duplicates`` (and therefore by
    ``__getitem__``/``get``) when more than one cookie matches.
    """
|
174 |
+
|
175 |
+
|
176 |
+
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
    """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(
                self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
            )
            return

        # A Morsel already carries its own attributes; otherwise build a
        # fresh Cookie from the supplied kwargs.
        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (
                path is None or cookie.path == path
            ):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        # A conflict still means the name is present (just more than once).
        try:
            return super().__contains__(name)
        except CookieConflictError:
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped quotes from values that arrive wrapped in '"..."'.
        if (
            hasattr(cookie.value, "startswith")
            and cookie.value.startswith('"')
            and cookie.value.endswith('"')
        ):
            cookie.value = cookie.value.replace('\\"', "")
        return super().set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super().update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:
                            # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError(
                                f"There are multiple cookies with name, {name!r}"
                            )
                        # we will eventually return this as long as no cookie conflict
                        toReturn = cookie.value

        if toReturn:
            return toReturn
        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop("_cookies_lock")
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if "_cookies_lock" not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.set_policy(self.get_policy())
        new_cj.update(self)
        return new_cj

    def get_policy(self):
        """Return the CookiePolicy instance used."""
        return self._policy
|
438 |
+
|
439 |
+
|
440 |
+
def _copy_cookie_jar(jar):
|
441 |
+
if jar is None:
|
442 |
+
return None
|
443 |
+
|
444 |
+
if hasattr(jar, "copy"):
|
445 |
+
# We're dealing with an instance of RequestsCookieJar
|
446 |
+
return jar.copy()
|
447 |
+
# We're dealing with a generic CookieJar instance
|
448 |
+
new_jar = copy.copy(jar)
|
449 |
+
new_jar.clear()
|
450 |
+
for cookie in jar:
|
451 |
+
new_jar.set_cookie(copy.copy(cookie))
|
452 |
+
return new_jar
|
453 |
+
|
454 |
+
|
455 |
+
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    # Defaults for every Cookie constructor field; kwargs may override any.
    result = dict(
        version=0,
        name=name,
        value=value,
        port=None,
        domain="",
        path="/",
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={"HttpOnly": None},
        rfc2109=False,
    )

    # Reject kwargs that don't correspond to a known cookie attribute.
    unknown = set(kwargs) - set(result)
    if unknown:
        raise TypeError(
            f"create_cookie() got unexpected keyword arguments: {list(unknown)}"
        )

    result.update(kwargs)
    # Derive the *_specified flags the Cookie constructor also requires.
    result["port_specified"] = bool(result["port"])
    result["domain_specified"] = bool(result["domain"])
    result["domain_initial_dot"] = result["domain"].startswith(".")
    result["path_specified"] = bool(result["path"])

    return cookielib.Cookie(**result)
|
490 |
+
|
491 |
+
|
492 |
+
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    expires = None
    max_age = morsel["max-age"]
    if max_age:
        # max-age wins over an absolute expires date; it is relative seconds.
        try:
            expires = int(time.time() + int(max_age))
        except ValueError:
            raise TypeError(f"max-age: {morsel['max-age']} must be integer")
    elif morsel["expires"]:
        time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
        expires = calendar.timegm(time.strptime(morsel["expires"], time_template))

    attrs = dict(
        comment=morsel["comment"],
        comment_url=bool(morsel["comment"]),
        discard=False,
        domain=morsel["domain"],
        expires=expires,
        name=morsel.key,
        path=morsel["path"],
        port=None,
        rest={"HttpOnly": morsel["httponly"]},
        rfc2109=False,
        secure=bool(morsel["secure"]),
        value=morsel.value,
        version=morsel["version"] or 0,
    )
    return create_cookie(**attrs)
|
519 |
+
|
520 |
+
|
521 |
+
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    :rtype: CookieJar
    """
    jar = RequestsCookieJar() if cookiejar is None else cookiejar

    if cookie_dict is None:
        return jar

    # Snapshot existing names once so `overwrite=False` is O(1) per lookup.
    existing = {cookie.name for cookie in jar}
    for cookie_name, cookie_value in cookie_dict.items():
        if overwrite or cookie_name not in existing:
            jar.set_cookie(create_cookie(cookie_name, cookie_value))

    return jar
|
540 |
+
|
541 |
+
|
542 |
+
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError("You can only merge into CookieJar")

    if isinstance(cookies, dict):
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJar has no ``update``; copy cookie-by-cookie instead.
            for incoming in cookies:
                cookiejar.set_cookie(incoming)

    return cookiejar
|
meow/lib/python3.13/site-packages/requests/exceptions.py
ADDED
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
requests.exceptions
|
3 |
+
~~~~~~~~~~~~~~~~~~~
|
4 |
+
|
5 |
+
This module contains the set of Requests' exceptions.
|
6 |
+
"""
|
7 |
+
from urllib3.exceptions import HTTPError as BaseHTTPError
|
8 |
+
|
9 |
+
from .compat import JSONDecodeError as CompatJSONDecodeError
|
10 |
+
|
11 |
+
|
12 |
+
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        self.response = kwargs.pop("response", None)
        self.request = kwargs.pop("request", None)
        # Borrow the request from the response when the caller supplied only
        # a response object that carries one.
        if (
            self.response is not None
            and not self.request
            and hasattr(self.response, "request")
        ):
            self.request = self.response.request
        super().__init__(*args, **kwargs)
|
25 |
+
|
26 |
+
|
27 |
+
class InvalidJSONError(RequestException):
    """Raised when a JSON error occurred."""
|
29 |
+
|
30 |
+
|
31 |
+
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
    """Couldn't decode the text into json"""

    def __init__(self, *args, **kwargs):
        """
        Construct the JSONDecodeError instance first with all
        args. Then use its args to construct the IOError so that
        the json specific args aren't used as IOError specific args
        and the error message from JSONDecodeError is preserved.
        """
        # Let the JSON library's __init__ build the rich message/args first;
        # it populates self.args, which are then forwarded to IOError.
        CompatJSONDecodeError.__init__(self, *args)
        InvalidJSONError.__init__(self, *self.args, **kwargs)

    def __reduce__(self):
        """
        The __reduce__ method called when pickling the object must
        be the one from the JSONDecodeError (be it json/simplejson)
        as it expects all the arguments for instantiation, not just
        one like the IOError, and the MRO would by default call the
        __reduce__ method from the IOError due to the inheritance order.
        """
        return CompatJSONDecodeError.__reduce__(self)
|
53 |
+
|
54 |
+
|
55 |
+
class HTTPError(RequestException):
    """Raised when an HTTP error occurred."""
|
57 |
+
|
58 |
+
|
59 |
+
class ConnectionError(RequestException):
    """Raised when a connection error occurred."""
|
61 |
+
|
62 |
+
|
63 |
+
class ProxyError(ConnectionError):
    """Raised when a proxy error occurred."""
|
65 |
+
|
66 |
+
|
67 |
+
class SSLError(ConnectionError):
    """Raised when an SSL error occurred."""
|
69 |
+
|
70 |
+
|
71 |
+
class Timeout(RequestException):
    """Raised when the request timed out.

    Catching this exception will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """
|
78 |
+
|
79 |
+
|
80 |
+
class ConnectTimeout(ConnectionError, Timeout):
    """Raised when the request timed out while trying to connect to the
    remote server.

    Requests that produced this error are safe to retry.
    """
|
85 |
+
|
86 |
+
|
87 |
+
class ReadTimeout(Timeout):
    """Raised when the server did not send any data in the allotted amount of time."""
|
89 |
+
|
90 |
+
|
91 |
+
class URLRequired(RequestException):
    """Raised when a valid URL is required to make a request."""
|
93 |
+
|
94 |
+
|
95 |
+
class TooManyRedirects(RequestException):
    """Raised when too many redirects were followed."""
|
97 |
+
|
98 |
+
|
99 |
+
class MissingSchema(RequestException, ValueError):
    """Raised when the URL scheme (e.g. http or https) is missing."""
|
101 |
+
|
102 |
+
|
103 |
+
class InvalidSchema(RequestException, ValueError):
    """Raised when the URL scheme provided is either invalid or unsupported."""
|
105 |
+
|
106 |
+
|
107 |
+
class InvalidURL(RequestException, ValueError):
    """Raised when the URL provided was somehow invalid."""
|
109 |
+
|
110 |
+
|
111 |
+
class InvalidHeader(RequestException, ValueError):
    """Raised when the header value provided was somehow invalid."""
|
113 |
+
|
114 |
+
|
115 |
+
class InvalidProxyURL(InvalidURL):
    """Raised when the proxy URL provided is invalid."""
|
117 |
+
|
118 |
+
|
119 |
+
class ChunkedEncodingError(RequestException):
    """Raised when the server declared chunked encoding but sent an invalid chunk."""
|
121 |
+
|
122 |
+
|
123 |
+
class ContentDecodingError(RequestException, BaseHTTPError):
    """Raised when decoding the response content failed."""
|
125 |
+
|
126 |
+
|
127 |
+
class StreamConsumedError(RequestException, TypeError):
    """Raised when the content for this response was already consumed."""
|
129 |
+
|
130 |
+
|
131 |
+
class RetryError(RequestException):
    """Raised when custom retry logic failed."""
|
133 |
+
|
134 |
+
|
135 |
+
class UnrewindableBodyError(RequestException):
    """Raised when Requests encountered an error while trying to rewind a body."""
|
137 |
+
|
138 |
+
|
139 |
+
# Warnings
|
140 |
+
|
141 |
+
|
142 |
+
class RequestsWarning(Warning):
    """Base warning class for Requests."""
|
144 |
+
|
145 |
+
|
146 |
+
class FileModeWarning(RequestsWarning, DeprecationWarning):
    """Warns that a file was opened in text mode, but Requests determined its binary length."""
|
148 |
+
|
149 |
+
|
150 |
+
class RequestsDependencyWarning(RequestsWarning):
    """Warns that an imported dependency doesn't match the expected version range."""
|
meow/lib/python3.13/site-packages/requests/help.py
ADDED
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Module containing bug report helper(s)."""
|
2 |
+
|
3 |
+
import json
|
4 |
+
import platform
|
5 |
+
import ssl
|
6 |
+
import sys
|
7 |
+
|
8 |
+
import idna
|
9 |
+
import urllib3
|
10 |
+
|
11 |
+
from . import __version__ as requests_version
|
12 |
+
|
13 |
+
try:
|
14 |
+
import charset_normalizer
|
15 |
+
except ImportError:
|
16 |
+
charset_normalizer = None
|
17 |
+
|
18 |
+
try:
|
19 |
+
import chardet
|
20 |
+
except ImportError:
|
21 |
+
chardet = None
|
22 |
+
|
23 |
+
try:
|
24 |
+
from urllib3.contrib import pyopenssl
|
25 |
+
except ImportError:
|
26 |
+
pyopenssl = None
|
27 |
+
OpenSSL = None
|
28 |
+
cryptography = None
|
29 |
+
else:
|
30 |
+
import cryptography
|
31 |
+
import OpenSSL
|
32 |
+
|
33 |
+
|
34 |
+
def _implementation():
    """Return a dict with the Python implementation and version.

    Provide both the name and the version of the Python implementation
    currently running. For example, on CPython 3.10.3 it will return
    {'name': 'CPython', 'version': '3.10.3'}.

    This function works best on CPython and PyPy: in particular, it probably
    doesn't work for Jython or IronPython. Future investigation should be done
    to work out the correct shape of the code for those platforms.
    """
    name = platform.python_implementation()

    if name == "CPython":
        version = platform.python_version()
    elif name == "PyPy":
        pypy = sys.pypy_version_info
        version = f"{pypy.major}.{pypy.minor}.{pypy.micro}"
        if pypy.releaselevel != "final":
            # Pre-release PyPy builds append e.g. "beta" to the version.
            version += pypy.releaselevel
    elif name in ("Jython", "IronPython"):
        version = platform.python_version()  # Complete Guess
    else:
        version = "Unknown"

    return {"name": name, "version": version}
|
67 |
+
|
68 |
+
|
69 |
+
def info():
    """Generate information for a bug report.

    :rtype: dict mapping component names to their version details.
    """
    try:
        platform_info = {
            "system": platform.system(),
            "release": platform.release(),
        }
    except OSError:
        # platform queries can fail on exotic systems; degrade gracefully.
        platform_info = {"system": "Unknown", "release": "Unknown"}

    # Optional detectors: report None when the package is absent.
    charset_normalizer_info = {
        "version": charset_normalizer.__version__ if charset_normalizer else None
    }
    chardet_info = {"version": chardet.__version__ if chardet else None}

    if OpenSSL:
        pyopenssl_info = {
            "version": OpenSSL.__version__,
            "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
        }
    else:
        pyopenssl_info = {"version": None, "openssl_version": ""}

    system_ssl = ssl.OPENSSL_VERSION_NUMBER
    system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}

    return {
        "platform": platform_info,
        "implementation": _implementation(),
        "system_ssl": system_ssl_info,
        "using_pyopenssl": pyopenssl is not None,
        "using_charset_normalizer": chardet is None,
        "pyOpenSSL": pyopenssl_info,
        "urllib3": {"version": urllib3.__version__},
        "chardet": chardet_info,
        "charset_normalizer": charset_normalizer_info,
        "cryptography": {"version": getattr(cryptography, "__version__", "")},
        "idna": {"version": getattr(idna, "__version__", "")},
        "requests": {"version": requests_version},
    }
|
126 |
+
|
127 |
+
|
128 |
+
def main():
    """Pretty-print the bug information as JSON."""
    report = info()
    print(json.dumps(report, sort_keys=True, indent=2))
|
131 |
+
|
132 |
+
|
133 |
+
if __name__ == "__main__":
|
134 |
+
main()
|